[ { "hash": "7d4fc577dac461a0d87361e3074cb2a7f3b43a65", "msg": "numarray merge, continued.", "author": { "name": "jmiller", "email": "jmiller@localhost" }, "committer": { "name": "jmiller", "email": "jmiller@localhost" }, "author_date": "2005-01-11T16:18:44+00:00", "author_timezone": 0, "committer_date": "2005-01-11T16:18:44+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "ab288fa1426e8a48bee09c517c90964882ddbef1" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 10, "insertions": 6, "lines": 16, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "scipy_test/testing.py", "new_path": "scipy_test/testing.py", "filename": "testing.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -34,15 +34,11 @@\n import types\n import imp\n \n-try:\n- # These are used by Numeric tests.\n- # If Numeric and scipy_base are not available, then some of the\n- # functions below will not be available.\n- from Numeric import alltrue,equal,shape,ravel,around,zeros,Float64,asarray,\\\n- less_equal,array2string,less,ArrayType\n- # `import scipy_base.fastumath as math` must be at the end of this file.\n-except ImportError,msg:\n- print msg\n+# These are used by Numeric tests.\n+# If Numeric and scipy_base are not available, then some of the\n+# functions below will not be available.\n+from scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros, Float64, asarray\n+from scipy_base.numerix import less_equal, array2string, less, ArrayType\n \n DEBUG = 0\n \n@@ -798,7 +794,7 @@ def output_exception():\n type = value = tb = None # clean up\n \n try:\n- import scipy_base.fastumath as math\n+ from scipy_base.numerix import fastumath as math\n except ImportError,msg:\n print msg\n import math\n", "added_lines": 6, "deleted_lines": 10, "source_code": "\"\"\"\nUnit-testing\n------------\n\n ScipyTest -- 
Scipy tests site manager\n ScipyTestCase -- unittest.TestCase with measure method\n IgnoreException -- raise when checking disabled feature ('ignoring' is displayed)\n set_package_path -- prepend package build directory to path\n set_local_path -- prepend local directory (to tests files) to path\n restore_path -- restore path after set_package_path\n\nTiming tools\n------------\n\n jiffies -- return 1/100ths of a second that the current process has used\n memusage -- virtual memory size in bytes of the running python [linux]\n\nUtility functions\n-----------------\n\n assert_equal -- assert equality\n assert_almost_equal -- assert equality with decimal tolerance\n assert_approx_equal -- assert equality with significant digits tolerance\n assert_array_equal -- assert arrays equality\n assert_array_almost_equal -- assert arrays equality with decimal tolerance\n assert_array_less -- assert arrays less-ordering\n rand -- array of random numbers from given shape\n\n\"\"\"\n\n__all__ = []\n\nimport os,sys,time,glob,string,traceback,unittest\nimport types\nimport imp\n\n# These are used by Numeric tests.\n# If Numeric and scipy_base are not available, then some of the\n# functions below will not be available.\nfrom scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros, Float64, asarray\nfrom scipy_base.numerix import less_equal, array2string, less, ArrayType\n\nDEBUG = 0\n\n__all__.append('set_package_path')\ndef set_package_path(level=1):\n \"\"\" Prepend package directory to sys.path.\n\n set_package_path should be called from a test_file.py that\n satisfies the following tree structure:\n\n //test_file.py\n\n Then the first existing path name from the following list\n\n /build/lib.-\n /..\n\n is prepended to sys.path.\n The caller is responsible for removing this path by using\n\n restore_path()\n \"\"\"\n from distutils.util import get_platform\n from scipy_distutils.misc_util import get_frame\n f = get_frame(level)\n if 
f.f_locals['__name__']=='__main__':\n testfile = sys.argv[0]\n else:\n testfile = f.f_locals['__file__']\n d = os.path.dirname(os.path.dirname(os.path.abspath(testfile)))\n d1 = os.path.join(d,'build','lib.%s-%s'%(get_platform(),sys.version[:3]))\n if not os.path.isdir(d1):\n d1 = os.path.dirname(d)\n if DEBUG:\n print 'Inserting %r to sys.path' % (d1)\n sys.path.insert(0,d1)\n\n__all__.append('set_local_path')\ndef set_local_path(reldir='', level=1):\n \"\"\" Prepend local directory to sys.path.\n\n The caller is responsible for removing this path by using\n\n restore_path()\n \"\"\"\n from scipy_distutils.misc_util import get_frame\n f = get_frame(level)\n if f.f_locals['__name__']=='__main__':\n testfile = sys.argv[0]\n else:\n testfile = f.f_locals['__file__']\n local_path = os.path.join(os.path.dirname(os.path.abspath(testfile)),reldir)\n if DEBUG:\n print 'Inserting %r to sys.path' % (local_path)\n sys.path.insert(0,local_path)\n\n__all__.append('restore_path')\ndef restore_path():\n if DEBUG:\n print 'Removing %r from sys.path' % (sys.path[0])\n del sys.path[0]\n\n__all__.extend(['jiffies','memusage'])\nif sys.platform[:5]=='linux':\n def jiffies(_proc_pid_stat = '/proc/%s/stat'%(os.getpid()),\n _load_time=time.time()):\n \"\"\" Return number of jiffies (1/100ths of a second) that this\n process has been scheduled in user mode. See man 5 proc. 
\"\"\"\n try:\n f=open(_proc_pid_stat,'r')\n l = f.readline().split(' ')\n f.close()\n return int(l[13])\n except:\n return int(100*(time.time()-_load_time))\n\n def memusage(_proc_pid_stat = '/proc/%s/stat'%(os.getpid())):\n \"\"\" Return virtual memory size in bytes of the running python.\n \"\"\"\n try:\n f=open(_proc_pid_stat,'r')\n l = f.readline().split(' ')\n f.close()\n return int(l[22])\n except:\n return\nelse:\n # os.getpid is not in all platforms available.\n # Using time is safe but inaccurate, especially when process\n # was suspended or sleeping.\n def jiffies(_load_time=time.time()):\n \"\"\" Return number of jiffies (1/100ths of a second) that this\n process has been scheduled in user mode. [Emulation with time.time]. \"\"\"\n return int(100*(time.time()-_load_time))\n\n def memusage():\n \"\"\" Return memory usage of running python. [Not implemented]\"\"\"\n return\n\n__all__.append('ScipyTestCase')\nclass ScipyTestCase (unittest.TestCase):\n\n def measure(self,code_str,times=1):\n \"\"\" Return elapsed time for executing code_str in the\n namespace of the caller for given times.\n \"\"\"\n frame = sys._getframe(1)\n locs,globs = frame.f_locals,frame.f_globals\n code = compile(code_str,\n 'ScipyTestCase runner for '+self.__class__.__name__,\n 'exec')\n i = 0\n elapsed = jiffies()\n while i>> ScipyTest().test(level=1,verbosity=2)\n\n is package name or its module object.\n\n Package is supposed to contain a directory tests/\n with test_*.py files where * refers to the names of submodules.\n\n test_*.py files are supposed to define a classes, derived\n from ScipyTestCase or unittest.TestCase, with methods having\n names starting with test or bench or check.\n\n And that is it! 
No need to implement test or test_suite functions\n in each .py file.\n\n Also old styled test_suite(level=1) hooks are supported but\n soon to be removed.\n \"\"\"\n def __init__(self, package='__main__'):\n self.package = package\n\n def _module_str(self, module):\n filename = module.__file__[-30:]\n if filename!=module.__file__:\n filename = '...'+filename\n return '' % (`module.__name__`, `filename`)\n\n def _get_method_names(self,clsobj,level):\n names = []\n for mthname in _get_all_method_names(clsobj):\n if mthname[:5] not in ['bench','check'] \\\n and mthname[:4] not in ['test']:\n continue\n mth = getattr(clsobj, mthname)\n if type(mth) is not types.MethodType:\n continue\n d = mth.im_func.func_defaults\n if d is not None:\n mthlevel = d[0]\n else:\n mthlevel = 1\n if level>=mthlevel:\n if mthname not in names:\n names.append(mthname)\n for base in clsobj.__bases__:\n for n in self._get_method_names(base,level):\n if n not in names:\n names.append(n) \n return names\n\n def _get_module_tests(self,module,level):\n mstr = self._module_str\n d,f = os.path.split(module.__file__)\n\n short_module_name = os.path.splitext(os.path.basename(f))[0]\n if short_module_name=='__init__':\n short_module_name = module.__name__.split('.')[-1]\n\n test_dir = os.path.join(d,'tests')\n test_file = os.path.join(test_dir,'test_'+short_module_name+'.py')\n\n local_test_dir = os.path.join(os.getcwd(),'tests')\n local_test_file = os.path.join(local_test_dir,\n 'test_'+short_module_name+'.py')\n if os.path.basename(os.path.dirname(local_test_dir)) \\\n == os.path.basename(os.path.dirname(test_dir)) \\\n and os.path.isfile(local_test_file):\n test_file = local_test_file\n\n if not os.path.isfile(test_file):\n if short_module_name[:5]=='info_' \\\n and short_module_name[5:]==module.__name__.split('.')[-2]:\n return []\n if short_module_name in ['__cvs_version__','__svn_version__']:\n return []\n if short_module_name[-8:]=='_version' \\\n and 
short_module_name[:-8]==module.__name__.split('.')[-2]:\n return []\n print ' !! No test file %r found for %s' \\\n % (os.path.basename(test_file), mstr(module))\n return []\n\n try:\n if sys.version[:3]=='2.1':\n # Workaround for Python 2.1 .pyc file generator bug\n import random\n pref = '-nopyc'+`random.randint(1,100)`\n else:\n pref = ''\n f = open(test_file,'r')\n test_module = imp.load_module(\\\n module.__name__+'.test_'+short_module_name+pref,\n f, test_file+pref,('.py', 'r', 1))\n f.close()\n if sys.version[:3]=='2.1' and os.path.isfile(test_file+pref+'c'):\n os.remove(test_file+pref+'c')\n except:\n print ' !! FAILURE importing tests for ', mstr(module)\n print ' ',\n output_exception()\n return []\n return self._get_suite_list(test_module, level, module.__name__)\n\n def _get_suite_list(self, test_module, level, module_name='__main__'):\n mstr = self._module_str\n if hasattr(test_module,'test_suite'):\n # Using old styled test suite\n try:\n total_suite = test_module.test_suite(level)\n return total_suite._tests\n except:\n print ' !! 
FAILURE building tests for ', mstr(test_module)\n print ' ',\n output_exception()\n return []\n suite_list = []\n for name in dir(test_module):\n obj = getattr(test_module, name)\n if type(obj) is not type(unittest.TestCase) \\\n or not issubclass(obj, unittest.TestCase) \\\n or obj.__name__[:4] != 'test':\n continue\n for mthname in self._get_method_names(obj,level):\n suite = obj(mthname)\n if getattr(suite,'isrunnable',lambda mthname:1)(mthname):\n suite_list.append(suite)\n print ' Found',len(suite_list),'tests for',module_name\n return suite_list\n\n def _touch_ppimported(self, module):\n from scipy_base.ppimport import _ModuleLoader\n if os.path.isdir(os.path.join(os.path.dirname(module.__file__),'tests')):\n # only touching those modules that have tests/ directory\n try: module._pliuh_plauh\n except AttributeError: pass\n for name in dir(module):\n obj = getattr(module,name)\n if isinstance(obj,_ModuleLoader) \\\n and not hasattr(obj,'_ppimport_module') \\\n and not hasattr(obj,'_ppimport_exc_info'):\n self._touch_ppimported(obj)\n\n def test(self,level=1,verbosity=1):\n \"\"\" Run Scipy module test suite with level and verbosity.\n \"\"\"\n if type(self.package) is type(''):\n exec 'import %s as this_package' % (self.package)\n else:\n this_package = self.package\n\n self._touch_ppimported(this_package)\n\n package_name = this_package.__name__\n\n suites = []\n for name, module in sys.modules.items():\n if package_name != name[:len(package_name)] \\\n or module is None \\\n or os.path.basename(os.path.dirname(module.__file__))=='tests':\n continue\n suites.extend(self._get_module_tests(module, level))\n\n suites.extend(self._get_suite_list(sys.modules[package_name], level))\n\n all_tests = unittest.TestSuite(suites)\n runner = unittest.TextTestRunner(verbosity=verbosity)\n runner.run(all_tests)\n return runner\n\n def run(self):\n \"\"\" Run Scipy module test suite with level and verbosity\n taken from sys.argv. 
Requires optparse module.\n \"\"\"\n try:\n from optparse import OptionParser\n except ImportError:\n print 'Failed to import optparse module, ignoring.'\n return self.test()\n usage = r'usage: %prog [-v ] [-l ]'\n parser = OptionParser(usage)\n parser.add_option(\"-v\", \"--verbosity\",\n action=\"store\",\n dest=\"verbosity\",\n default=1,\n type='int')\n parser.add_option(\"-l\", \"--level\",\n action=\"store\",\n dest=\"level\",\n default=1,\n type='int')\n (options, args) = parser.parse_args()\n self.test(options.level,options.verbosity)\n\n#------------\n \ndef remove_ignored_patterns(files,pattern):\n from fnmatch import fnmatch\n good_files = []\n for file in files:\n if not fnmatch(file,pattern):\n good_files.append(file)\n return good_files\n\ndef remove_ignored_files(original,ignored_files,cur_dir):\n \"\"\" This is actually expanded to do pattern matching.\n\n \"\"\"\n if not ignored_files: ignored_files = []\n ignored_modules = map(lambda x: x+'.py',ignored_files)\n ignored_packages = ignored_files[:]\n # always ignore setup.py and __init__.py files\n ignored_files = ['setup.py','setup_*.py','__init__.py']\n ignored_files += ignored_modules + ignored_packages\n ignored_files = map(lambda x,cur_dir=cur_dir: os.path.join(cur_dir,x),\n ignored_files)\n #print 'ignored:', ignored_files\n #good_files = filter(lambda x,ignored = ignored_files: x not in ignored,\n # original)\n good_files = original\n for pattern in ignored_files:\n good_files = remove_ignored_patterns(good_files,pattern)\n\n return good_files\n\n__all__.append('harvest_modules')\ndef harvest_modules(package,ignore=None):\n \"\"\"* Retreive a list of all modules that live within a package.\n\n Only retreive files that are immediate children of the\n package -- do not recurse through child packages or\n directories. 
The returned list contains actual modules, not\n just their names.\n *\"\"\"\n d,f = os.path.split(package.__file__)\n\n # go through the directory and import every py file there.\n common_dir = os.path.join(d,'*.py')\n py_files = glob.glob(common_dir)\n #py_files.remove(os.path.join(d,'__init__.py'))\n #py_files.remove(os.path.join(d,'setup.py'))\n\n py_files = remove_ignored_files(py_files,ignore,d)\n #print 'py_files:', py_files\n try:\n prefix = package.__name__\n except:\n prefix = ''\n\n all_modules = []\n for file in py_files:\n d,f = os.path.split(file)\n base,ext = os.path.splitext(f)\n mod = prefix + '.' + base\n #print 'module: import ' + mod\n try:\n exec ('import ' + mod)\n all_modules.append(eval(mod))\n except:\n print 'FAILURE to import ' + mod\n output_exception()\n\n return all_modules\n\n__all__.append('harvest_packages')\ndef harvest_packages(package,ignore = None):\n \"\"\" Retreive a list of all sub-packages that live within a package.\n\n Only retreive packages that are immediate children of this\n package -- do not recurse through child packages or\n directories. The returned list contains actual package objects, not\n just their names.\n \"\"\"\n join = os.path.join\n\n d,f = os.path.split(package.__file__)\n\n common_dir = os.path.abspath(d)\n all_files = os.listdir(d)\n\n all_files = remove_ignored_files(all_files,ignore,'')\n #print 'all_files:', all_files\n try:\n prefix = package.__name__\n except:\n prefix = ''\n all_packages = []\n for directory in all_files:\n path = join(common_dir,directory)\n if os.path.isdir(path) and \\\n os.path.exists(join(path,'__init__.py')):\n sub_package = prefix + '.' 
+ directory\n #print 'sub-package import ' + sub_package\n try:\n exec ('import ' + sub_package)\n all_packages.append(eval(sub_package))\n except:\n print 'FAILURE to import ' + sub_package\n output_exception()\n return all_packages\n\n__all__.append('harvest_modules_and_packages')\ndef harvest_modules_and_packages(package,ignore=None):\n \"\"\" Retreive list of all packages and modules that live within a package.\n\n See harvest_packages() and harvest_modules()\n \"\"\"\n all = harvest_modules(package,ignore) + harvest_packages(package,ignore)\n return all\n\n__all__.append('harvest_test_suites')\ndef harvest_test_suites(package,ignore = None,level=10):\n \"\"\"\n package -- the module to test. This is an actual module object\n (not a string)\n ignore -- a list of module names to omit from the tests\n level -- a value between 1 and 10. 1 will run the minimum number\n of tests. This is a fast \"smoke test\". Tests that take\n longer to run should have higher numbers ranging up to 10.\n \"\"\"\n suites=[]\n test_modules = harvest_modules_and_packages(package,ignore)\n #for i in test_modules:\n # print i.__name__\n for module in test_modules:\n if hasattr(module,'test_suite'):\n try:\n suite = module.test_suite(level=level)\n if suite:\n suites.append(suite)\n else:\n print \" !! FAILURE without error - shouldn't happen\",\n print module.__name__\n except:\n print ' !! 
FAILURE building test for ', module.__name__\n print ' ',\n output_exception()\n else:\n try:\n print 'No test suite found for ', module.__name__\n except AttributeError:\n # __version__.py getting replaced by a string throws a kink\n # in checking for modules, so we think is a module has\n # actually been overwritten\n print 'No test suite found for ', str(module)\n total_suite = unittest.TestSuite(suites)\n return total_suite\n\n__all__.append('module_test')\ndef module_test(mod_name,mod_file,level=10):\n \"\"\"*\n\n *\"\"\"\n #print 'testing', mod_name\n d,f = os.path.split(mod_file)\n\n # insert the tests directory to the python path\n test_dir = os.path.join(d,'tests')\n sys.path.insert(0,test_dir)\n\n # call the \"test_xxx.test()\" function for the appropriate\n # module.\n\n # This should deal with package naming issues correctly\n short_mod_name = string.split(mod_name,'.')[-1]\n test_module = 'test_' + short_mod_name\n test_string = 'import %s;reload(%s);%s.test(%d)' % \\\n ((test_module,)*3 + (level,))\n\n # This would be better cause it forces a reload of the orginal\n # module. 
It doesn't behave with packages however.\n #test_string = 'reload(%s);import %s;reload(%s);%s.test(%d)' % \\\n # ((mod_name,) + (test_module,)*3)\n exec(test_string)\n\n # remove test directory from python path.\n sys.path = sys.path[1:]\n\n__all__.append('module_test_suite')\ndef module_test_suite(mod_name,mod_file,level=10):\n #try:\n print ' creating test suite for:', mod_name\n d,f = os.path.split(mod_file)\n\n # insert the tests directory to the python path\n test_dir = os.path.join(d,'tests')\n sys.path.insert(0,test_dir)\n\n # call the \"test_xxx.test()\" function for the appropriate\n # module.\n\n # This should deal with package naming issues correctly\n short_mod_name = string.split(mod_name,'.')[-1]\n test_module = 'test_' + short_mod_name\n test_string = 'import %s;reload(%s);suite = %s.test_suite(%d)' % \\\n ((test_module,)*3+(level,))\n #print test_string\n exec(test_string)\n\n # remove test directory from python path.\n sys.path = sys.path[1:]\n return suite\n #except:\n # print ' !! FAILURE loading test suite from', test_module, ':'\n # print ' ',\n # output_exception()\n\n\n# Utility function to facilitate testing.\n\n__all__.append('assert_equal')\ndef assert_equal(actual,desired,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. I think this should be part of unittest.py\n \"\"\"\n if isinstance(actual, ArrayType) or isinstance(desired, ArrayType):\n return assert_array_equal(actual, desired, err_msg)\n msg = '\\nItems are not equal:\\n' + err_msg\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert desired == actual, msg\n\n__all__.append('assert_almost_equal')\ndef assert_almost_equal(actual,desired,decimal=7,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. 
I think this should be part of unittest.py\n \"\"\"\n if isinstance(actual, ArrayType) or isinstance(desired, ArrayType):\n return assert_array_almost_equal(actual, desired, decimal, err_msg)\n msg = '\\nItems are not equal:\\n' + err_msg\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert round(abs(desired - actual),decimal) == 0, msg\n\n__all__.append('assert_approx_equal')\ndef assert_approx_equal(actual,desired,significant=7,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. I think this should be part of unittest.py\n Approximately equal is defined as the number of significant digits\n correct\n \"\"\"\n msg = '\\nItems are not equal to %d significant digits:\\n' % significant\n msg += err_msg\n actual, desired = map(float, (actual, desired))\n # Normalized the numbers to be in range (-10.0,10.0)\n scale = pow(10,math.floor(math.log10(0.5*(abs(desired)+abs(actual)))))\n try:\n sc_desired = desired/scale\n except ZeroDivisionError:\n sc_desired = 0.0\n try:\n sc_actual = actual/scale\n except ZeroDivisionError:\n sc_actual = 0.0\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert math.fabs(sc_desired - sc_actual) < pow(10.,-1*significant), msg\n\n\n__all__.append('assert_array_equal')\ndef assert_array_equal(x,y,err_msg=''):\n x,y = asarray(x), asarray(y)\n msg = '\\nArrays are not equal'\n try:\n assert 0 in [len(shape(x)),len(shape(y))] \\\n or (len(shape(x))==len(shape(y)) and \\\n alltrue(equal(shape(x),shape(y)))),\\\n msg + ' (shapes %s, %s mismatch):\\n\\t' \\\n % (shape(x),shape(y)) + err_msg\n reduced = 
ravel(equal(x,y))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=16)\n s2 = array2string(y,precision=16)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' (mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n raise ValueError, msg\n\n__all__.append('assert_array_almost_equal')\ndef assert_array_almost_equal(x,y,decimal=6,err_msg=''):\n x = asarray(x)\n y = asarray(y)\n msg = '\\nArrays are not almost equal'\n try:\n cond = alltrue(equal(shape(x),shape(y)))\n if not cond:\n msg = msg + ' (shapes mismatch):\\n\\t'\\\n 'Shape of array 1: %s\\n\\tShape of array 2: %s' % (shape(x),shape(y))\n assert cond, msg + '\\n\\t' + err_msg\n reduced = ravel(equal(less_equal(around(abs(x-y),decimal),10.0**(-decimal)),1))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=decimal+1)\n s2 = array2string(y,precision=decimal+1)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' (mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n print sys.exc_value\n print shape(x),shape(y)\n print x, y\n raise ValueError, 'arrays are not almost equal'\n\n__all__.append('assert_array_less')\ndef assert_array_less(x,y,err_msg=''):\n x,y = asarray(x), asarray(y)\n msg = '\\nArrays are not less-ordered'\n try:\n assert alltrue(equal(shape(x),shape(y))),\\\n msg + ' (shapes mismatch):\\n\\t' + err_msg\n reduced = ravel(less(x,y))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=16)\n s2 = array2string(y,precision=16)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' 
(mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n print shape(x),shape(y)\n raise ValueError, 'arrays are not less-ordered'\n\n__all__.append('rand')\ndef rand(*args):\n \"\"\" Returns an array of random numbers with the given shape.\n used for testing\n \"\"\"\n import random\n results = zeros(args,Float64)\n f = results.flat\n for i in range(len(f)):\n f[i] = random.random()\n return results\n\ndef output_exception():\n try:\n type, value, tb = sys.exc_info()\n info = traceback.extract_tb(tb)\n #this is more verbose\n #traceback.print_exc()\n filename, lineno, function, text = info[-1] # last line only\n print \"%s:%d: %s: %s (in %s)\" %\\\n (filename, lineno, type.__name__, str(value), function)\n finally:\n type = value = tb = None # clean up\n\ntry:\n from scipy_base.numerix import fastumath as math\nexcept ImportError,msg:\n print msg\n import math\n", "source_code_before": "\"\"\"\nUnit-testing\n------------\n\n ScipyTest -- Scipy tests site manager\n ScipyTestCase -- unittest.TestCase with measure method\n IgnoreException -- raise when checking disabled feature ('ignoring' is displayed)\n set_package_path -- prepend package build directory to path\n set_local_path -- prepend local directory (to tests files) to path\n restore_path -- restore path after set_package_path\n\nTiming tools\n------------\n\n jiffies -- return 1/100ths of a second that the current process has used\n memusage -- virtual memory size in bytes of the running python [linux]\n\nUtility functions\n-----------------\n\n assert_equal -- assert equality\n assert_almost_equal -- assert equality with decimal tolerance\n assert_approx_equal -- assert equality with significant digits tolerance\n assert_array_equal -- assert arrays equality\n assert_array_almost_equal -- assert arrays equality with decimal tolerance\n assert_array_less -- assert arrays less-ordering\n rand -- array of random numbers from given 
shape\n\n\"\"\"\n\n__all__ = []\n\nimport os,sys,time,glob,string,traceback,unittest\nimport types\nimport imp\n\ntry:\n # These are used by Numeric tests.\n # If Numeric and scipy_base are not available, then some of the\n # functions below will not be available.\n from Numeric import alltrue,equal,shape,ravel,around,zeros,Float64,asarray,\\\n less_equal,array2string,less,ArrayType\n # `import scipy_base.fastumath as math` must be at the end of this file.\nexcept ImportError,msg:\n print msg\n\nDEBUG = 0\n\n__all__.append('set_package_path')\ndef set_package_path(level=1):\n \"\"\" Prepend package directory to sys.path.\n\n set_package_path should be called from a test_file.py that\n satisfies the following tree structure:\n\n //test_file.py\n\n Then the first existing path name from the following list\n\n /build/lib.-\n /..\n\n is prepended to sys.path.\n The caller is responsible for removing this path by using\n\n restore_path()\n \"\"\"\n from distutils.util import get_platform\n from scipy_distutils.misc_util import get_frame\n f = get_frame(level)\n if f.f_locals['__name__']=='__main__':\n testfile = sys.argv[0]\n else:\n testfile = f.f_locals['__file__']\n d = os.path.dirname(os.path.dirname(os.path.abspath(testfile)))\n d1 = os.path.join(d,'build','lib.%s-%s'%(get_platform(),sys.version[:3]))\n if not os.path.isdir(d1):\n d1 = os.path.dirname(d)\n if DEBUG:\n print 'Inserting %r to sys.path' % (d1)\n sys.path.insert(0,d1)\n\n__all__.append('set_local_path')\ndef set_local_path(reldir='', level=1):\n \"\"\" Prepend local directory to sys.path.\n\n The caller is responsible for removing this path by using\n\n restore_path()\n \"\"\"\n from scipy_distutils.misc_util import get_frame\n f = get_frame(level)\n if f.f_locals['__name__']=='__main__':\n testfile = sys.argv[0]\n else:\n testfile = f.f_locals['__file__']\n local_path = os.path.join(os.path.dirname(os.path.abspath(testfile)),reldir)\n if DEBUG:\n print 'Inserting %r to sys.path' % (local_path)\n 
sys.path.insert(0,local_path)\n\n__all__.append('restore_path')\ndef restore_path():\n if DEBUG:\n print 'Removing %r from sys.path' % (sys.path[0])\n del sys.path[0]\n\n__all__.extend(['jiffies','memusage'])\nif sys.platform[:5]=='linux':\n def jiffies(_proc_pid_stat = '/proc/%s/stat'%(os.getpid()),\n _load_time=time.time()):\n \"\"\" Return number of jiffies (1/100ths of a second) that this\n process has been scheduled in user mode. See man 5 proc. \"\"\"\n try:\n f=open(_proc_pid_stat,'r')\n l = f.readline().split(' ')\n f.close()\n return int(l[13])\n except:\n return int(100*(time.time()-_load_time))\n\n def memusage(_proc_pid_stat = '/proc/%s/stat'%(os.getpid())):\n \"\"\" Return virtual memory size in bytes of the running python.\n \"\"\"\n try:\n f=open(_proc_pid_stat,'r')\n l = f.readline().split(' ')\n f.close()\n return int(l[22])\n except:\n return\nelse:\n # os.getpid is not in all platforms available.\n # Using time is safe but inaccurate, especially when process\n # was suspended or sleeping.\n def jiffies(_load_time=time.time()):\n \"\"\" Return number of jiffies (1/100ths of a second) that this\n process has been scheduled in user mode. [Emulation with time.time]. \"\"\"\n return int(100*(time.time()-_load_time))\n\n def memusage():\n \"\"\" Return memory usage of running python. 
[Not implemented]\"\"\"\n return\n\n__all__.append('ScipyTestCase')\nclass ScipyTestCase (unittest.TestCase):\n\n def measure(self,code_str,times=1):\n \"\"\" Return elapsed time for executing code_str in the\n namespace of the caller for given times.\n \"\"\"\n frame = sys._getframe(1)\n locs,globs = frame.f_locals,frame.f_globals\n code = compile(code_str,\n 'ScipyTestCase runner for '+self.__class__.__name__,\n 'exec')\n i = 0\n elapsed = jiffies()\n while i>> ScipyTest().test(level=1,verbosity=2)\n\n is package name or its module object.\n\n Package is supposed to contain a directory tests/\n with test_*.py files where * refers to the names of submodules.\n\n test_*.py files are supposed to define a classes, derived\n from ScipyTestCase or unittest.TestCase, with methods having\n names starting with test or bench or check.\n\n And that is it! No need to implement test or test_suite functions\n in each .py file.\n\n Also old styled test_suite(level=1) hooks are supported but\n soon to be removed.\n \"\"\"\n def __init__(self, package='__main__'):\n self.package = package\n\n def _module_str(self, module):\n filename = module.__file__[-30:]\n if filename!=module.__file__:\n filename = '...'+filename\n return '' % (`module.__name__`, `filename`)\n\n def _get_method_names(self,clsobj,level):\n names = []\n for mthname in _get_all_method_names(clsobj):\n if mthname[:5] not in ['bench','check'] \\\n and mthname[:4] not in ['test']:\n continue\n mth = getattr(clsobj, mthname)\n if type(mth) is not types.MethodType:\n continue\n d = mth.im_func.func_defaults\n if d is not None:\n mthlevel = d[0]\n else:\n mthlevel = 1\n if level>=mthlevel:\n if mthname not in names:\n names.append(mthname)\n for base in clsobj.__bases__:\n for n in self._get_method_names(base,level):\n if n not in names:\n names.append(n) \n return names\n\n def _get_module_tests(self,module,level):\n mstr = self._module_str\n d,f = os.path.split(module.__file__)\n\n short_module_name = 
os.path.splitext(os.path.basename(f))[0]\n if short_module_name=='__init__':\n short_module_name = module.__name__.split('.')[-1]\n\n test_dir = os.path.join(d,'tests')\n test_file = os.path.join(test_dir,'test_'+short_module_name+'.py')\n\n local_test_dir = os.path.join(os.getcwd(),'tests')\n local_test_file = os.path.join(local_test_dir,\n 'test_'+short_module_name+'.py')\n if os.path.basename(os.path.dirname(local_test_dir)) \\\n == os.path.basename(os.path.dirname(test_dir)) \\\n and os.path.isfile(local_test_file):\n test_file = local_test_file\n\n if not os.path.isfile(test_file):\n if short_module_name[:5]=='info_' \\\n and short_module_name[5:]==module.__name__.split('.')[-2]:\n return []\n if short_module_name in ['__cvs_version__','__svn_version__']:\n return []\n if short_module_name[-8:]=='_version' \\\n and short_module_name[:-8]==module.__name__.split('.')[-2]:\n return []\n print ' !! No test file %r found for %s' \\\n % (os.path.basename(test_file), mstr(module))\n return []\n\n try:\n if sys.version[:3]=='2.1':\n # Workaround for Python 2.1 .pyc file generator bug\n import random\n pref = '-nopyc'+`random.randint(1,100)`\n else:\n pref = ''\n f = open(test_file,'r')\n test_module = imp.load_module(\\\n module.__name__+'.test_'+short_module_name+pref,\n f, test_file+pref,('.py', 'r', 1))\n f.close()\n if sys.version[:3]=='2.1' and os.path.isfile(test_file+pref+'c'):\n os.remove(test_file+pref+'c')\n except:\n print ' !! FAILURE importing tests for ', mstr(module)\n print ' ',\n output_exception()\n return []\n return self._get_suite_list(test_module, level, module.__name__)\n\n def _get_suite_list(self, test_module, level, module_name='__main__'):\n mstr = self._module_str\n if hasattr(test_module,'test_suite'):\n # Using old styled test suite\n try:\n total_suite = test_module.test_suite(level)\n return total_suite._tests\n except:\n print ' !! 
FAILURE building tests for ', mstr(test_module)\n print ' ',\n output_exception()\n return []\n suite_list = []\n for name in dir(test_module):\n obj = getattr(test_module, name)\n if type(obj) is not type(unittest.TestCase) \\\n or not issubclass(obj, unittest.TestCase) \\\n or obj.__name__[:4] != 'test':\n continue\n for mthname in self._get_method_names(obj,level):\n suite = obj(mthname)\n if getattr(suite,'isrunnable',lambda mthname:1)(mthname):\n suite_list.append(suite)\n print ' Found',len(suite_list),'tests for',module_name\n return suite_list\n\n def _touch_ppimported(self, module):\n from scipy_base.ppimport import _ModuleLoader\n if os.path.isdir(os.path.join(os.path.dirname(module.__file__),'tests')):\n # only touching those modules that have tests/ directory\n try: module._pliuh_plauh\n except AttributeError: pass\n for name in dir(module):\n obj = getattr(module,name)\n if isinstance(obj,_ModuleLoader) \\\n and not hasattr(obj,'_ppimport_module') \\\n and not hasattr(obj,'_ppimport_exc_info'):\n self._touch_ppimported(obj)\n\n def test(self,level=1,verbosity=1):\n \"\"\" Run Scipy module test suite with level and verbosity.\n \"\"\"\n if type(self.package) is type(''):\n exec 'import %s as this_package' % (self.package)\n else:\n this_package = self.package\n\n self._touch_ppimported(this_package)\n\n package_name = this_package.__name__\n\n suites = []\n for name, module in sys.modules.items():\n if package_name != name[:len(package_name)] \\\n or module is None \\\n or os.path.basename(os.path.dirname(module.__file__))=='tests':\n continue\n suites.extend(self._get_module_tests(module, level))\n\n suites.extend(self._get_suite_list(sys.modules[package_name], level))\n\n all_tests = unittest.TestSuite(suites)\n runner = unittest.TextTestRunner(verbosity=verbosity)\n runner.run(all_tests)\n return runner\n\n def run(self):\n \"\"\" Run Scipy module test suite with level and verbosity\n taken from sys.argv. 
Requires optparse module.\n \"\"\"\n try:\n from optparse import OptionParser\n except ImportError:\n print 'Failed to import optparse module, ignoring.'\n return self.test()\n usage = r'usage: %prog [-v ] [-l ]'\n parser = OptionParser(usage)\n parser.add_option(\"-v\", \"--verbosity\",\n action=\"store\",\n dest=\"verbosity\",\n default=1,\n type='int')\n parser.add_option(\"-l\", \"--level\",\n action=\"store\",\n dest=\"level\",\n default=1,\n type='int')\n (options, args) = parser.parse_args()\n self.test(options.level,options.verbosity)\n\n#------------\n \ndef remove_ignored_patterns(files,pattern):\n from fnmatch import fnmatch\n good_files = []\n for file in files:\n if not fnmatch(file,pattern):\n good_files.append(file)\n return good_files\n\ndef remove_ignored_files(original,ignored_files,cur_dir):\n \"\"\" This is actually expanded to do pattern matching.\n\n \"\"\"\n if not ignored_files: ignored_files = []\n ignored_modules = map(lambda x: x+'.py',ignored_files)\n ignored_packages = ignored_files[:]\n # always ignore setup.py and __init__.py files\n ignored_files = ['setup.py','setup_*.py','__init__.py']\n ignored_files += ignored_modules + ignored_packages\n ignored_files = map(lambda x,cur_dir=cur_dir: os.path.join(cur_dir,x),\n ignored_files)\n #print 'ignored:', ignored_files\n #good_files = filter(lambda x,ignored = ignored_files: x not in ignored,\n # original)\n good_files = original\n for pattern in ignored_files:\n good_files = remove_ignored_patterns(good_files,pattern)\n\n return good_files\n\n__all__.append('harvest_modules')\ndef harvest_modules(package,ignore=None):\n \"\"\"* Retreive a list of all modules that live within a package.\n\n Only retreive files that are immediate children of the\n package -- do not recurse through child packages or\n directories. 
The returned list contains actual modules, not\n just their names.\n *\"\"\"\n d,f = os.path.split(package.__file__)\n\n # go through the directory and import every py file there.\n common_dir = os.path.join(d,'*.py')\n py_files = glob.glob(common_dir)\n #py_files.remove(os.path.join(d,'__init__.py'))\n #py_files.remove(os.path.join(d,'setup.py'))\n\n py_files = remove_ignored_files(py_files,ignore,d)\n #print 'py_files:', py_files\n try:\n prefix = package.__name__\n except:\n prefix = ''\n\n all_modules = []\n for file in py_files:\n d,f = os.path.split(file)\n base,ext = os.path.splitext(f)\n mod = prefix + '.' + base\n #print 'module: import ' + mod\n try:\n exec ('import ' + mod)\n all_modules.append(eval(mod))\n except:\n print 'FAILURE to import ' + mod\n output_exception()\n\n return all_modules\n\n__all__.append('harvest_packages')\ndef harvest_packages(package,ignore = None):\n \"\"\" Retreive a list of all sub-packages that live within a package.\n\n Only retreive packages that are immediate children of this\n package -- do not recurse through child packages or\n directories. The returned list contains actual package objects, not\n just their names.\n \"\"\"\n join = os.path.join\n\n d,f = os.path.split(package.__file__)\n\n common_dir = os.path.abspath(d)\n all_files = os.listdir(d)\n\n all_files = remove_ignored_files(all_files,ignore,'')\n #print 'all_files:', all_files\n try:\n prefix = package.__name__\n except:\n prefix = ''\n all_packages = []\n for directory in all_files:\n path = join(common_dir,directory)\n if os.path.isdir(path) and \\\n os.path.exists(join(path,'__init__.py')):\n sub_package = prefix + '.' 
+ directory\n #print 'sub-package import ' + sub_package\n try:\n exec ('import ' + sub_package)\n all_packages.append(eval(sub_package))\n except:\n print 'FAILURE to import ' + sub_package\n output_exception()\n return all_packages\n\n__all__.append('harvest_modules_and_packages')\ndef harvest_modules_and_packages(package,ignore=None):\n \"\"\" Retreive list of all packages and modules that live within a package.\n\n See harvest_packages() and harvest_modules()\n \"\"\"\n all = harvest_modules(package,ignore) + harvest_packages(package,ignore)\n return all\n\n__all__.append('harvest_test_suites')\ndef harvest_test_suites(package,ignore = None,level=10):\n \"\"\"\n package -- the module to test. This is an actual module object\n (not a string)\n ignore -- a list of module names to omit from the tests\n level -- a value between 1 and 10. 1 will run the minimum number\n of tests. This is a fast \"smoke test\". Tests that take\n longer to run should have higher numbers ranging up to 10.\n \"\"\"\n suites=[]\n test_modules = harvest_modules_and_packages(package,ignore)\n #for i in test_modules:\n # print i.__name__\n for module in test_modules:\n if hasattr(module,'test_suite'):\n try:\n suite = module.test_suite(level=level)\n if suite:\n suites.append(suite)\n else:\n print \" !! FAILURE without error - shouldn't happen\",\n print module.__name__\n except:\n print ' !! 
FAILURE building test for ', module.__name__\n print ' ',\n output_exception()\n else:\n try:\n print 'No test suite found for ', module.__name__\n except AttributeError:\n # __version__.py getting replaced by a string throws a kink\n # in checking for modules, so we think is a module has\n # actually been overwritten\n print 'No test suite found for ', str(module)\n total_suite = unittest.TestSuite(suites)\n return total_suite\n\n__all__.append('module_test')\ndef module_test(mod_name,mod_file,level=10):\n \"\"\"*\n\n *\"\"\"\n #print 'testing', mod_name\n d,f = os.path.split(mod_file)\n\n # insert the tests directory to the python path\n test_dir = os.path.join(d,'tests')\n sys.path.insert(0,test_dir)\n\n # call the \"test_xxx.test()\" function for the appropriate\n # module.\n\n # This should deal with package naming issues correctly\n short_mod_name = string.split(mod_name,'.')[-1]\n test_module = 'test_' + short_mod_name\n test_string = 'import %s;reload(%s);%s.test(%d)' % \\\n ((test_module,)*3 + (level,))\n\n # This would be better cause it forces a reload of the orginal\n # module. 
It doesn't behave with packages however.\n #test_string = 'reload(%s);import %s;reload(%s);%s.test(%d)' % \\\n # ((mod_name,) + (test_module,)*3)\n exec(test_string)\n\n # remove test directory from python path.\n sys.path = sys.path[1:]\n\n__all__.append('module_test_suite')\ndef module_test_suite(mod_name,mod_file,level=10):\n #try:\n print ' creating test suite for:', mod_name\n d,f = os.path.split(mod_file)\n\n # insert the tests directory to the python path\n test_dir = os.path.join(d,'tests')\n sys.path.insert(0,test_dir)\n\n # call the \"test_xxx.test()\" function for the appropriate\n # module.\n\n # This should deal with package naming issues correctly\n short_mod_name = string.split(mod_name,'.')[-1]\n test_module = 'test_' + short_mod_name\n test_string = 'import %s;reload(%s);suite = %s.test_suite(%d)' % \\\n ((test_module,)*3+(level,))\n #print test_string\n exec(test_string)\n\n # remove test directory from python path.\n sys.path = sys.path[1:]\n return suite\n #except:\n # print ' !! FAILURE loading test suite from', test_module, ':'\n # print ' ',\n # output_exception()\n\n\n# Utility function to facilitate testing.\n\n__all__.append('assert_equal')\ndef assert_equal(actual,desired,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. I think this should be part of unittest.py\n \"\"\"\n if isinstance(actual, ArrayType) or isinstance(desired, ArrayType):\n return assert_array_equal(actual, desired, err_msg)\n msg = '\\nItems are not equal:\\n' + err_msg\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert desired == actual, msg\n\n__all__.append('assert_almost_equal')\ndef assert_almost_equal(actual,desired,decimal=7,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. 
I think this should be part of unittest.py\n \"\"\"\n if isinstance(actual, ArrayType) or isinstance(desired, ArrayType):\n return assert_array_almost_equal(actual, desired, decimal, err_msg)\n msg = '\\nItems are not equal:\\n' + err_msg\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert round(abs(desired - actual),decimal) == 0, msg\n\n__all__.append('assert_approx_equal')\ndef assert_approx_equal(actual,desired,significant=7,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. I think this should be part of unittest.py\n Approximately equal is defined as the number of significant digits\n correct\n \"\"\"\n msg = '\\nItems are not equal to %d significant digits:\\n' % significant\n msg += err_msg\n actual, desired = map(float, (actual, desired))\n # Normalized the numbers to be in range (-10.0,10.0)\n scale = pow(10,math.floor(math.log10(0.5*(abs(desired)+abs(actual)))))\n try:\n sc_desired = desired/scale\n except ZeroDivisionError:\n sc_desired = 0.0\n try:\n sc_actual = actual/scale\n except ZeroDivisionError:\n sc_actual = 0.0\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert math.fabs(sc_desired - sc_actual) < pow(10.,-1*significant), msg\n\n\n__all__.append('assert_array_equal')\ndef assert_array_equal(x,y,err_msg=''):\n x,y = asarray(x), asarray(y)\n msg = '\\nArrays are not equal'\n try:\n assert 0 in [len(shape(x)),len(shape(y))] \\\n or (len(shape(x))==len(shape(y)) and \\\n alltrue(equal(shape(x),shape(y)))),\\\n msg + ' (shapes %s, %s mismatch):\\n\\t' \\\n % (shape(x),shape(y)) + err_msg\n reduced = 
ravel(equal(x,y))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=16)\n s2 = array2string(y,precision=16)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' (mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n raise ValueError, msg\n\n__all__.append('assert_array_almost_equal')\ndef assert_array_almost_equal(x,y,decimal=6,err_msg=''):\n x = asarray(x)\n y = asarray(y)\n msg = '\\nArrays are not almost equal'\n try:\n cond = alltrue(equal(shape(x),shape(y)))\n if not cond:\n msg = msg + ' (shapes mismatch):\\n\\t'\\\n 'Shape of array 1: %s\\n\\tShape of array 2: %s' % (shape(x),shape(y))\n assert cond, msg + '\\n\\t' + err_msg\n reduced = ravel(equal(less_equal(around(abs(x-y),decimal),10.0**(-decimal)),1))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=decimal+1)\n s2 = array2string(y,precision=decimal+1)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' (mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n print sys.exc_value\n print shape(x),shape(y)\n print x, y\n raise ValueError, 'arrays are not almost equal'\n\n__all__.append('assert_array_less')\ndef assert_array_less(x,y,err_msg=''):\n x,y = asarray(x), asarray(y)\n msg = '\\nArrays are not less-ordered'\n try:\n assert alltrue(equal(shape(x),shape(y))),\\\n msg + ' (shapes mismatch):\\n\\t' + err_msg\n reduced = ravel(less(x,y))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=16)\n s2 = array2string(y,precision=16)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' 
(mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n print shape(x),shape(y)\n raise ValueError, 'arrays are not less-ordered'\n\n__all__.append('rand')\ndef rand(*args):\n \"\"\" Returns an array of random numbers with the given shape.\n used for testing\n \"\"\"\n import random\n results = zeros(args,Float64)\n f = results.flat\n for i in range(len(f)):\n f[i] = random.random()\n return results\n\ndef output_exception():\n try:\n type, value, tb = sys.exc_info()\n info = traceback.extract_tb(tb)\n #this is more verbose\n #traceback.print_exc()\n filename, lineno, function, text = info[-1] # last line only\n print \"%s:%d: %s: %s (in %s)\" %\\\n (filename, lineno, type.__name__, str(value), function)\n finally:\n type = value = tb = None # clean up\n\ntry:\n import scipy_base.fastumath as math\nexcept ImportError,msg:\n print msg\n import math\n", "methods": [ { "name": "set_package_path", "long_name": "set_package_path( level = 1 )", "filename": "testing.py", "nloc": 15, "complexity": 4, "token_count": 146, "parameters": [ "level" ], "start_line": 46, "end_line": 77, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 0 }, { "name": "set_local_path", "long_name": "set_local_path( reldir = '' , level = 1 )", "filename": "testing.py", "nloc": 11, "complexity": 3, "token_count": 97, "parameters": [ "reldir", "level" ], "start_line": 80, "end_line": 96, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "restore_path", "long_name": "restore_path( )", "filename": "testing.py", "nloc": 4, "complexity": 2, "token_count": 25, "parameters": [], "start_line": 99, "end_line": 102, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "jiffies", "long_name": "jiffies( _proc_pid_stat = '/proc/%s/stat' % ( os . 
getpid ( )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "_proc_pid_stat" ], "start_line": 106, "end_line": 107, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "memusage", "long_name": "memusage( _proc_pid_stat = '/proc/%s/stat' % ( os . getpid ( )", "filename": "testing.py", "nloc": 10, "complexity": 2, "token_count": 54, "parameters": [ "_proc_pid_stat" ], "start_line": 118, "end_line": 127, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "jiffies", "long_name": "jiffies( _load_time = time . time ( )", "filename": "testing.py", "nloc": 4, "complexity": 1, "token_count": 27, "parameters": [ "_load_time" ], "start_line": 132, "end_line": 135, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "memusage", "long_name": "memusage( )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 6, "parameters": [], "start_line": 137, "end_line": 139, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "measure", "long_name": "measure( self , code_str , times = 1 )", "filename": "testing.py", "nloc": 13, "complexity": 2, "token_count": 82, "parameters": [ "self", "code_str", "times" ], "start_line": 144, "end_line": 159, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 1 }, { "name": "__call__", "long_name": "__call__( self , result = None )", "filename": "testing.py", "nloc": 24, "complexity": 6, "token_count": 224, "parameters": [ "self", "result" ], "start_line": 161, "end_line": 185, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self , stream )", "filename": "testing.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "self", "stream" ], "start_line": 188, "end_line": 190, "fan_in": 
0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "write", "long_name": "write( self , message )", "filename": "testing.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "self", "message" ], "start_line": 191, "end_line": 196, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "writeln", "long_name": "writeln( self , message )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 15, "parameters": [ "self", "message" ], "start_line": 197, "end_line": 198, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_get_all_method_names", "long_name": "_get_all_method_names( cls )", "filename": "testing.py", "nloc": 8, "complexity": 5, "token_count": 56, "parameters": [ "cls" ], "start_line": 206, "end_line": 213, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , package = '__main__' )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "self", "package" ], "start_line": 237, "end_line": 238, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_module_str", "long_name": "_module_str( self , module )", "filename": "testing.py", "nloc": 5, "complexity": 2, "token_count": 43, "parameters": [ "self", "module" ], "start_line": 240, "end_line": 244, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "_get_method_names", "long_name": "_get_method_names( self , clsobj , level )", "filename": "testing.py", "nloc": 22, "complexity": 11, "token_count": 142, "parameters": [ "self", "clsobj", "level" ], "start_line": 246, "end_line": 267, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "_get_module_tests", "long_name": "_get_module_tests( self , module 
, level )", "filename": "testing.py", "nloc": 46, "complexity": 14, "token_count": 428, "parameters": [ "self", "module", "level" ], "start_line": 269, "end_line": 320, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 52, "top_nesting_level": 1 }, { "name": "_get_suite_list", "long_name": "_get_suite_list( self , test_module , level , module_name = '__main__' )", "filename": "testing.py", "nloc": 24, "complexity": 9, "token_count": 168, "parameters": [ "self", "test_module", "level", "module_name" ], "start_line": 322, "end_line": 346, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 1 }, { "name": "_touch_ppimported", "long_name": "_touch_ppimported( self , module )", "filename": "testing.py", "nloc": 11, "complexity": 7, "token_count": 98, "parameters": [ "self", "module" ], "start_line": 348, "end_line": 359, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "test", "long_name": "test( self , level = 1 , verbosity = 1 )", "filename": "testing.py", "nloc": 19, "complexity": 6, "token_count": 166, "parameters": [ "self", "level", "verbosity" ], "start_line": 361, "end_line": 386, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "testing.py", "nloc": 20, "complexity": 2, "token_count": 104, "parameters": [ "self" ], "start_line": 388, "end_line": 410, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "remove_ignored_patterns", "long_name": "remove_ignored_patterns( files , pattern )", "filename": "testing.py", "nloc": 7, "complexity": 3, "token_count": 37, "parameters": [ "files", "pattern" ], "start_line": 414, "end_line": 420, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "remove_ignored_files", "long_name": "remove_ignored_files( original , ignored_files , cur_dir )", 
"filename": "testing.py", "nloc": 12, "complexity": 3, "token_count": 93, "parameters": [ "original", "ignored_files", "cur_dir" ], "start_line": 422, "end_line": 441, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "harvest_modules", "long_name": "harvest_modules( package , ignore = None )", "filename": "testing.py", "nloc": 21, "complexity": 4, "token_count": 134, "parameters": [ "package", "ignore" ], "start_line": 444, "end_line": 480, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 37, "top_nesting_level": 0 }, { "name": "harvest_packages", "long_name": "harvest_packages( package , ignore = None )", "filename": "testing.py", "nloc": 23, "complexity": 6, "token_count": 148, "parameters": [ "package", "ignore" ], "start_line": 483, "end_line": 517, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 0 }, { "name": "harvest_modules_and_packages", "long_name": "harvest_modules_and_packages( package , ignore = None )", "filename": "testing.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "package", "ignore" ], "start_line": 520, "end_line": 526, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "harvest_test_suites", "long_name": "harvest_test_suites( package , ignore = None , level = 10 )", "filename": "testing.py", "nloc": 23, "complexity": 6, "token_count": 113, "parameters": [ "package", "ignore", "level" ], "start_line": 529, "end_line": 564, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 0 }, { "name": "module_test", "long_name": "module_test( mod_name , mod_file , level = 10 )", "filename": "testing.py", "nloc": 10, "complexity": 1, "token_count": 98, "parameters": [ "mod_name", "mod_file", "level" ], "start_line": 567, "end_line": 594, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 28, "top_nesting_level": 0 }, { "name": "module_test_suite", 
"long_name": "module_test_suite( mod_name , mod_file , level = 10 )", "filename": "testing.py", "nloc": 12, "complexity": 1, "token_count": 103, "parameters": [ "mod_name", "mod_file", "level" ], "start_line": 597, "end_line": 619, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 0 }, { "name": "assert_equal", "long_name": "assert_equal( actual , desired , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 14, "complexity": 7, "token_count": 116, "parameters": [ "actual", "desired", "err_msg", "verbose" ], "start_line": 629, "end_line": 645, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "assert_almost_equal", "long_name": "assert_almost_equal( actual , desired , decimal = 7 , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 14, "complexity": 7, "token_count": 132, "parameters": [ "actual", "desired", "decimal", "err_msg", "verbose" ], "start_line": 648, "end_line": 664, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "assert_approx_equal", "long_name": "assert_approx_equal( actual , desired , significant = 7 , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 23, "complexity": 7, "token_count": 191, "parameters": [ "actual", "desired", "significant", "err_msg", "verbose" ], "start_line": 667, "end_line": 695, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 0 }, { "name": "assert_array_equal", "long_name": "assert_array_equal( x , y , err_msg = '' )", "filename": "testing.py", "nloc": 22, "complexity": 7, "token_count": 232, "parameters": [ "x", "y", "err_msg" ], "start_line": 699, "end_line": 720, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 0 }, { "name": "assert_array_almost_equal", "long_name": "assert_array_almost_equal( x , y , decimal = 6 , err_msg = '' )", "filename": "testing.py", "nloc": 26, 
"complexity": 6, "token_count": 251, "parameters": [ "x", "y", "decimal", "err_msg" ], "start_line": 723, "end_line": 748, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 0 }, { "name": "assert_array_less", "long_name": "assert_array_less( x , y , err_msg = '' )", "filename": "testing.py", "nloc": 20, "complexity": 5, "token_count": 189, "parameters": [ "x", "y", "err_msg" ], "start_line": 751, "end_line": 770, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "rand", "long_name": "rand( * args )", "filename": "testing.py", "nloc": 7, "complexity": 2, "token_count": 45, "parameters": [ "args" ], "start_line": 773, "end_line": 782, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "output_exception", "long_name": "output_exception( )", "filename": "testing.py", "nloc": 9, "complexity": 2, "token_count": 67, "parameters": [], "start_line": 784, "end_line": 794, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 } ], "methods_before": [ { "name": "set_package_path", "long_name": "set_package_path( level = 1 )", "filename": "testing.py", "nloc": 15, "complexity": 4, "token_count": 146, "parameters": [ "level" ], "start_line": 50, "end_line": 81, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 0 }, { "name": "set_local_path", "long_name": "set_local_path( reldir = '' , level = 1 )", "filename": "testing.py", "nloc": 11, "complexity": 3, "token_count": 97, "parameters": [ "reldir", "level" ], "start_line": 84, "end_line": 100, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "restore_path", "long_name": "restore_path( )", "filename": "testing.py", "nloc": 4, "complexity": 2, "token_count": 25, "parameters": [], "start_line": 103, "end_line": 106, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, 
"top_nesting_level": 0 }, { "name": "jiffies", "long_name": "jiffies( _proc_pid_stat = '/proc/%s/stat' % ( os . getpid ( )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "_proc_pid_stat" ], "start_line": 110, "end_line": 111, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "memusage", "long_name": "memusage( _proc_pid_stat = '/proc/%s/stat' % ( os . getpid ( )", "filename": "testing.py", "nloc": 10, "complexity": 2, "token_count": 54, "parameters": [ "_proc_pid_stat" ], "start_line": 122, "end_line": 131, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "jiffies", "long_name": "jiffies( _load_time = time . time ( )", "filename": "testing.py", "nloc": 4, "complexity": 1, "token_count": 27, "parameters": [ "_load_time" ], "start_line": 136, "end_line": 139, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "memusage", "long_name": "memusage( )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 6, "parameters": [], "start_line": 141, "end_line": 143, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "measure", "long_name": "measure( self , code_str , times = 1 )", "filename": "testing.py", "nloc": 13, "complexity": 2, "token_count": 82, "parameters": [ "self", "code_str", "times" ], "start_line": 148, "end_line": 163, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 1 }, { "name": "__call__", "long_name": "__call__( self , result = None )", "filename": "testing.py", "nloc": 24, "complexity": 6, "token_count": 224, "parameters": [ "self", "result" ], "start_line": 165, "end_line": 189, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self , stream )", "filename": "testing.py", "nloc": 3, 
"complexity": 1, "token_count": 18, "parameters": [ "self", "stream" ], "start_line": 192, "end_line": 194, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "write", "long_name": "write( self , message )", "filename": "testing.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "self", "message" ], "start_line": 195, "end_line": 200, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "writeln", "long_name": "writeln( self , message )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 15, "parameters": [ "self", "message" ], "start_line": 201, "end_line": 202, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_get_all_method_names", "long_name": "_get_all_method_names( cls )", "filename": "testing.py", "nloc": 8, "complexity": 5, "token_count": 56, "parameters": [ "cls" ], "start_line": 210, "end_line": 217, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , package = '__main__' )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "self", "package" ], "start_line": 241, "end_line": 242, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_module_str", "long_name": "_module_str( self , module )", "filename": "testing.py", "nloc": 5, "complexity": 2, "token_count": 43, "parameters": [ "self", "module" ], "start_line": 244, "end_line": 248, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "_get_method_names", "long_name": "_get_method_names( self , clsobj , level )", "filename": "testing.py", "nloc": 22, "complexity": 11, "token_count": 142, "parameters": [ "self", "clsobj", "level" ], "start_line": 250, "end_line": 271, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, 
"length": 22, "top_nesting_level": 1 }, { "name": "_get_module_tests", "long_name": "_get_module_tests( self , module , level )", "filename": "testing.py", "nloc": 46, "complexity": 14, "token_count": 428, "parameters": [ "self", "module", "level" ], "start_line": 273, "end_line": 324, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 52, "top_nesting_level": 1 }, { "name": "_get_suite_list", "long_name": "_get_suite_list( self , test_module , level , module_name = '__main__' )", "filename": "testing.py", "nloc": 24, "complexity": 9, "token_count": 168, "parameters": [ "self", "test_module", "level", "module_name" ], "start_line": 326, "end_line": 350, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 1 }, { "name": "_touch_ppimported", "long_name": "_touch_ppimported( self , module )", "filename": "testing.py", "nloc": 11, "complexity": 7, "token_count": 98, "parameters": [ "self", "module" ], "start_line": 352, "end_line": 363, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "test", "long_name": "test( self , level = 1 , verbosity = 1 )", "filename": "testing.py", "nloc": 19, "complexity": 6, "token_count": 166, "parameters": [ "self", "level", "verbosity" ], "start_line": 365, "end_line": 390, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "testing.py", "nloc": 20, "complexity": 2, "token_count": 104, "parameters": [ "self" ], "start_line": 392, "end_line": 414, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "remove_ignored_patterns", "long_name": "remove_ignored_patterns( files , pattern )", "filename": "testing.py", "nloc": 7, "complexity": 3, "token_count": 37, "parameters": [ "files", "pattern" ], "start_line": 418, "end_line": 424, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 
}, { "name": "remove_ignored_files", "long_name": "remove_ignored_files( original , ignored_files , cur_dir )", "filename": "testing.py", "nloc": 12, "complexity": 3, "token_count": 93, "parameters": [ "original", "ignored_files", "cur_dir" ], "start_line": 426, "end_line": 445, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "harvest_modules", "long_name": "harvest_modules( package , ignore = None )", "filename": "testing.py", "nloc": 21, "complexity": 4, "token_count": 134, "parameters": [ "package", "ignore" ], "start_line": 448, "end_line": 484, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 37, "top_nesting_level": 0 }, { "name": "harvest_packages", "long_name": "harvest_packages( package , ignore = None )", "filename": "testing.py", "nloc": 23, "complexity": 6, "token_count": 148, "parameters": [ "package", "ignore" ], "start_line": 487, "end_line": 521, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 0 }, { "name": "harvest_modules_and_packages", "long_name": "harvest_modules_and_packages( package , ignore = None )", "filename": "testing.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "package", "ignore" ], "start_line": 524, "end_line": 530, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "harvest_test_suites", "long_name": "harvest_test_suites( package , ignore = None , level = 10 )", "filename": "testing.py", "nloc": 23, "complexity": 6, "token_count": 113, "parameters": [ "package", "ignore", "level" ], "start_line": 533, "end_line": 568, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 0 }, { "name": "module_test", "long_name": "module_test( mod_name , mod_file , level = 10 )", "filename": "testing.py", "nloc": 10, "complexity": 1, "token_count": 98, "parameters": [ "mod_name", "mod_file", "level" ], "start_line": 571, "end_line": 598, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 28, "top_nesting_level": 0 }, { "name": "module_test_suite", "long_name": "module_test_suite( mod_name , mod_file , level = 10 )", "filename": "testing.py", "nloc": 12, "complexity": 1, "token_count": 103, "parameters": [ "mod_name", "mod_file", "level" ], "start_line": 601, "end_line": 623, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 0 }, { "name": "assert_equal", "long_name": "assert_equal( actual , desired , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 14, "complexity": 7, "token_count": 116, "parameters": [ "actual", "desired", "err_msg", "verbose" ], "start_line": 633, "end_line": 649, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "assert_almost_equal", "long_name": "assert_almost_equal( actual , desired , decimal = 7 , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 14, "complexity": 7, "token_count": 132, "parameters": [ "actual", "desired", "decimal", "err_msg", "verbose" ], "start_line": 652, "end_line": 668, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "assert_approx_equal", "long_name": "assert_approx_equal( actual , desired , significant = 7 , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 23, "complexity": 7, "token_count": 191, "parameters": [ "actual", "desired", "significant", "err_msg", "verbose" ], "start_line": 671, "end_line": 699, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 0 }, { "name": "assert_array_equal", "long_name": "assert_array_equal( x , y , err_msg = '' )", "filename": "testing.py", "nloc": 22, "complexity": 7, "token_count": 232, "parameters": [ "x", "y", "err_msg" ], "start_line": 703, "end_line": 724, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 0 }, { "name": "assert_array_almost_equal", "long_name": 
"assert_array_almost_equal( x , y , decimal = 6 , err_msg = '' )", "filename": "testing.py", "nloc": 26, "complexity": 6, "token_count": 251, "parameters": [ "x", "y", "decimal", "err_msg" ], "start_line": 727, "end_line": 752, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 0 }, { "name": "assert_array_less", "long_name": "assert_array_less( x , y , err_msg = '' )", "filename": "testing.py", "nloc": 20, "complexity": 5, "token_count": 189, "parameters": [ "x", "y", "err_msg" ], "start_line": 755, "end_line": 774, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "rand", "long_name": "rand( * args )", "filename": "testing.py", "nloc": 7, "complexity": 2, "token_count": 45, "parameters": [ "args" ], "start_line": 777, "end_line": 786, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "output_exception", "long_name": "output_exception( )", "filename": "testing.py", "nloc": 9, "complexity": 2, "token_count": 67, "parameters": [], "start_line": 788, "end_line": 798, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 } ], "changed_methods": [], "nloc": 596, "complexity": 152, "token_count": 4285, "diff_parsed": { "added": [ "# These are used by Numeric tests.", "# If Numeric and scipy_base are not available, then some of the", "# functions below will not be available.", "from scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros, Float64, asarray", "from scipy_base.numerix import less_equal, array2string, less, ArrayType", " from scipy_base.numerix import fastumath as math" ], "deleted": [ "try:", " # These are used by Numeric tests.", " # If Numeric and scipy_base are not available, then some of the", " # functions below will not be available.", " from Numeric import alltrue,equal,shape,ravel,around,zeros,Float64,asarray,\\", " less_equal,array2string,less,ArrayType", " # `import 
scipy_base.fastumath as math` must be at the end of this file.", "except ImportError,msg:", " print msg", " import scipy_base.fastumath as math" ] } } ] }, { "hash": "de989a16fbc7aa77285b92aba139c381941658b2", "msg": "Fixed importing ScipyTest.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-01-13T19:05:54+00:00", "author_timezone": 0, "committer_date": "2005-01-13T19:05:54+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "7d4fc577dac461a0d87361e3074cb2a7f3b43a65" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 5, "insertions": 6, "lines": 11, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "scipy_test/testing.py", "new_path": "scipy_test/testing.py", "filename": "testing.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -34,11 +34,9 @@\n import types\n import imp\n \n-# These are used by Numeric tests.\n-# If Numeric and scipy_base are not available, then some of the\n-# functions below will not be available.\n-from scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros, Float64, asarray\n-from scipy_base.numerix import less_equal, array2string, less, ArrayType\n+#\n+# Imports from scipy_base must be done at the end of this file.\n+#\n \n DEBUG = 0\n \n@@ -793,6 +791,9 @@ def output_exception():\n finally:\n type = value = tb = None # clean up\n \n+from scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros,\\\n+ Float64, asarray, less_equal, array2string, less, ArrayType\n+\n try:\n from scipy_base.numerix import fastumath as math\n except ImportError,msg:\n", "added_lines": 6, "deleted_lines": 5, "source_code": "\"\"\"\nUnit-testing\n------------\n\n ScipyTest -- Scipy tests site manager\n ScipyTestCase -- 
unittest.TestCase with measure method\n IgnoreException -- raise when checking disabled feature ('ignoring' is displayed)\n set_package_path -- prepend package build directory to path\n set_local_path -- prepend local directory (to tests files) to path\n restore_path -- restore path after set_package_path\n\nTiming tools\n------------\n\n jiffies -- return 1/100ths of a second that the current process has used\n memusage -- virtual memory size in bytes of the running python [linux]\n\nUtility functions\n-----------------\n\n assert_equal -- assert equality\n assert_almost_equal -- assert equality with decimal tolerance\n assert_approx_equal -- assert equality with significant digits tolerance\n assert_array_equal -- assert arrays equality\n assert_array_almost_equal -- assert arrays equality with decimal tolerance\n assert_array_less -- assert arrays less-ordering\n rand -- array of random numbers from given shape\n\n\"\"\"\n\n__all__ = []\n\nimport os,sys,time,glob,string,traceback,unittest\nimport types\nimport imp\n\n#\n# Imports from scipy_base must be done at the end of this file.\n#\n\nDEBUG = 0\n\n__all__.append('set_package_path')\ndef set_package_path(level=1):\n \"\"\" Prepend package directory to sys.path.\n\n set_package_path should be called from a test_file.py that\n satisfies the following tree structure:\n\n //test_file.py\n\n Then the first existing path name from the following list\n\n /build/lib.-\n /..\n\n is prepended to sys.path.\n The caller is responsible for removing this path by using\n\n restore_path()\n \"\"\"\n from distutils.util import get_platform\n from scipy_distutils.misc_util import get_frame\n f = get_frame(level)\n if f.f_locals['__name__']=='__main__':\n testfile = sys.argv[0]\n else:\n testfile = f.f_locals['__file__']\n d = os.path.dirname(os.path.dirname(os.path.abspath(testfile)))\n d1 = os.path.join(d,'build','lib.%s-%s'%(get_platform(),sys.version[:3]))\n if not os.path.isdir(d1):\n d1 = os.path.dirname(d)\n if DEBUG:\n 
print 'Inserting %r to sys.path' % (d1)\n sys.path.insert(0,d1)\n\n__all__.append('set_local_path')\ndef set_local_path(reldir='', level=1):\n \"\"\" Prepend local directory to sys.path.\n\n The caller is responsible for removing this path by using\n\n restore_path()\n \"\"\"\n from scipy_distutils.misc_util import get_frame\n f = get_frame(level)\n if f.f_locals['__name__']=='__main__':\n testfile = sys.argv[0]\n else:\n testfile = f.f_locals['__file__']\n local_path = os.path.join(os.path.dirname(os.path.abspath(testfile)),reldir)\n if DEBUG:\n print 'Inserting %r to sys.path' % (local_path)\n sys.path.insert(0,local_path)\n\n__all__.append('restore_path')\ndef restore_path():\n if DEBUG:\n print 'Removing %r from sys.path' % (sys.path[0])\n del sys.path[0]\n\n__all__.extend(['jiffies','memusage'])\nif sys.platform[:5]=='linux':\n def jiffies(_proc_pid_stat = '/proc/%s/stat'%(os.getpid()),\n _load_time=time.time()):\n \"\"\" Return number of jiffies (1/100ths of a second) that this\n process has been scheduled in user mode. See man 5 proc. \"\"\"\n try:\n f=open(_proc_pid_stat,'r')\n l = f.readline().split(' ')\n f.close()\n return int(l[13])\n except:\n return int(100*(time.time()-_load_time))\n\n def memusage(_proc_pid_stat = '/proc/%s/stat'%(os.getpid())):\n \"\"\" Return virtual memory size in bytes of the running python.\n \"\"\"\n try:\n f=open(_proc_pid_stat,'r')\n l = f.readline().split(' ')\n f.close()\n return int(l[22])\n except:\n return\nelse:\n # os.getpid is not in all platforms available.\n # Using time is safe but inaccurate, especially when process\n # was suspended or sleeping.\n def jiffies(_load_time=time.time()):\n \"\"\" Return number of jiffies (1/100ths of a second) that this\n process has been scheduled in user mode. [Emulation with time.time]. \"\"\"\n return int(100*(time.time()-_load_time))\n\n def memusage():\n \"\"\" Return memory usage of running python. 
[Not implemented]\"\"\"\n return\n\n__all__.append('ScipyTestCase')\nclass ScipyTestCase (unittest.TestCase):\n\n def measure(self,code_str,times=1):\n \"\"\" Return elapsed time for executing code_str in the\n namespace of the caller for given times.\n \"\"\"\n frame = sys._getframe(1)\n locs,globs = frame.f_locals,frame.f_globals\n code = compile(code_str,\n 'ScipyTestCase runner for '+self.__class__.__name__,\n 'exec')\n i = 0\n elapsed = jiffies()\n while i>> ScipyTest().test(level=1,verbosity=2)\n\n is package name or its module object.\n\n Package is supposed to contain a directory tests/\n with test_*.py files where * refers to the names of submodules.\n\n test_*.py files are supposed to define a classes, derived\n from ScipyTestCase or unittest.TestCase, with methods having\n names starting with test or bench or check.\n\n And that is it! No need to implement test or test_suite functions\n in each .py file.\n\n Also old styled test_suite(level=1) hooks are supported but\n soon to be removed.\n \"\"\"\n def __init__(self, package='__main__'):\n self.package = package\n\n def _module_str(self, module):\n filename = module.__file__[-30:]\n if filename!=module.__file__:\n filename = '...'+filename\n return '' % (`module.__name__`, `filename`)\n\n def _get_method_names(self,clsobj,level):\n names = []\n for mthname in _get_all_method_names(clsobj):\n if mthname[:5] not in ['bench','check'] \\\n and mthname[:4] not in ['test']:\n continue\n mth = getattr(clsobj, mthname)\n if type(mth) is not types.MethodType:\n continue\n d = mth.im_func.func_defaults\n if d is not None:\n mthlevel = d[0]\n else:\n mthlevel = 1\n if level>=mthlevel:\n if mthname not in names:\n names.append(mthname)\n for base in clsobj.__bases__:\n for n in self._get_method_names(base,level):\n if n not in names:\n names.append(n) \n return names\n\n def _get_module_tests(self,module,level):\n mstr = self._module_str\n d,f = os.path.split(module.__file__)\n\n short_module_name = 
os.path.splitext(os.path.basename(f))[0]\n if short_module_name=='__init__':\n short_module_name = module.__name__.split('.')[-1]\n\n test_dir = os.path.join(d,'tests')\n test_file = os.path.join(test_dir,'test_'+short_module_name+'.py')\n\n local_test_dir = os.path.join(os.getcwd(),'tests')\n local_test_file = os.path.join(local_test_dir,\n 'test_'+short_module_name+'.py')\n if os.path.basename(os.path.dirname(local_test_dir)) \\\n == os.path.basename(os.path.dirname(test_dir)) \\\n and os.path.isfile(local_test_file):\n test_file = local_test_file\n\n if not os.path.isfile(test_file):\n if short_module_name[:5]=='info_' \\\n and short_module_name[5:]==module.__name__.split('.')[-2]:\n return []\n if short_module_name in ['__cvs_version__','__svn_version__']:\n return []\n if short_module_name[-8:]=='_version' \\\n and short_module_name[:-8]==module.__name__.split('.')[-2]:\n return []\n print ' !! No test file %r found for %s' \\\n % (os.path.basename(test_file), mstr(module))\n return []\n\n try:\n if sys.version[:3]=='2.1':\n # Workaround for Python 2.1 .pyc file generator bug\n import random\n pref = '-nopyc'+`random.randint(1,100)`\n else:\n pref = ''\n f = open(test_file,'r')\n test_module = imp.load_module(\\\n module.__name__+'.test_'+short_module_name+pref,\n f, test_file+pref,('.py', 'r', 1))\n f.close()\n if sys.version[:3]=='2.1' and os.path.isfile(test_file+pref+'c'):\n os.remove(test_file+pref+'c')\n except:\n print ' !! FAILURE importing tests for ', mstr(module)\n print ' ',\n output_exception()\n return []\n return self._get_suite_list(test_module, level, module.__name__)\n\n def _get_suite_list(self, test_module, level, module_name='__main__'):\n mstr = self._module_str\n if hasattr(test_module,'test_suite'):\n # Using old styled test suite\n try:\n total_suite = test_module.test_suite(level)\n return total_suite._tests\n except:\n print ' !! 
FAILURE building tests for ', mstr(test_module)\n print ' ',\n output_exception()\n return []\n suite_list = []\n for name in dir(test_module):\n obj = getattr(test_module, name)\n if type(obj) is not type(unittest.TestCase) \\\n or not issubclass(obj, unittest.TestCase) \\\n or obj.__name__[:4] != 'test':\n continue\n for mthname in self._get_method_names(obj,level):\n suite = obj(mthname)\n if getattr(suite,'isrunnable',lambda mthname:1)(mthname):\n suite_list.append(suite)\n print ' Found',len(suite_list),'tests for',module_name\n return suite_list\n\n def _touch_ppimported(self, module):\n from scipy_base.ppimport import _ModuleLoader\n if os.path.isdir(os.path.join(os.path.dirname(module.__file__),'tests')):\n # only touching those modules that have tests/ directory\n try: module._pliuh_plauh\n except AttributeError: pass\n for name in dir(module):\n obj = getattr(module,name)\n if isinstance(obj,_ModuleLoader) \\\n and not hasattr(obj,'_ppimport_module') \\\n and not hasattr(obj,'_ppimport_exc_info'):\n self._touch_ppimported(obj)\n\n def test(self,level=1,verbosity=1):\n \"\"\" Run Scipy module test suite with level and verbosity.\n \"\"\"\n if type(self.package) is type(''):\n exec 'import %s as this_package' % (self.package)\n else:\n this_package = self.package\n\n self._touch_ppimported(this_package)\n\n package_name = this_package.__name__\n\n suites = []\n for name, module in sys.modules.items():\n if package_name != name[:len(package_name)] \\\n or module is None \\\n or os.path.basename(os.path.dirname(module.__file__))=='tests':\n continue\n suites.extend(self._get_module_tests(module, level))\n\n suites.extend(self._get_suite_list(sys.modules[package_name], level))\n\n all_tests = unittest.TestSuite(suites)\n runner = unittest.TextTestRunner(verbosity=verbosity)\n runner.run(all_tests)\n return runner\n\n def run(self):\n \"\"\" Run Scipy module test suite with level and verbosity\n taken from sys.argv. 
Requires optparse module.\n \"\"\"\n try:\n from optparse import OptionParser\n except ImportError:\n print 'Failed to import optparse module, ignoring.'\n return self.test()\n usage = r'usage: %prog [-v ] [-l ]'\n parser = OptionParser(usage)\n parser.add_option(\"-v\", \"--verbosity\",\n action=\"store\",\n dest=\"verbosity\",\n default=1,\n type='int')\n parser.add_option(\"-l\", \"--level\",\n action=\"store\",\n dest=\"level\",\n default=1,\n type='int')\n (options, args) = parser.parse_args()\n self.test(options.level,options.verbosity)\n\n#------------\n \ndef remove_ignored_patterns(files,pattern):\n from fnmatch import fnmatch\n good_files = []\n for file in files:\n if not fnmatch(file,pattern):\n good_files.append(file)\n return good_files\n\ndef remove_ignored_files(original,ignored_files,cur_dir):\n \"\"\" This is actually expanded to do pattern matching.\n\n \"\"\"\n if not ignored_files: ignored_files = []\n ignored_modules = map(lambda x: x+'.py',ignored_files)\n ignored_packages = ignored_files[:]\n # always ignore setup.py and __init__.py files\n ignored_files = ['setup.py','setup_*.py','__init__.py']\n ignored_files += ignored_modules + ignored_packages\n ignored_files = map(lambda x,cur_dir=cur_dir: os.path.join(cur_dir,x),\n ignored_files)\n #print 'ignored:', ignored_files\n #good_files = filter(lambda x,ignored = ignored_files: x not in ignored,\n # original)\n good_files = original\n for pattern in ignored_files:\n good_files = remove_ignored_patterns(good_files,pattern)\n\n return good_files\n\n__all__.append('harvest_modules')\ndef harvest_modules(package,ignore=None):\n \"\"\"* Retreive a list of all modules that live within a package.\n\n Only retreive files that are immediate children of the\n package -- do not recurse through child packages or\n directories. 
The returned list contains actual modules, not\n just their names.\n *\"\"\"\n d,f = os.path.split(package.__file__)\n\n # go through the directory and import every py file there.\n common_dir = os.path.join(d,'*.py')\n py_files = glob.glob(common_dir)\n #py_files.remove(os.path.join(d,'__init__.py'))\n #py_files.remove(os.path.join(d,'setup.py'))\n\n py_files = remove_ignored_files(py_files,ignore,d)\n #print 'py_files:', py_files\n try:\n prefix = package.__name__\n except:\n prefix = ''\n\n all_modules = []\n for file in py_files:\n d,f = os.path.split(file)\n base,ext = os.path.splitext(f)\n mod = prefix + '.' + base\n #print 'module: import ' + mod\n try:\n exec ('import ' + mod)\n all_modules.append(eval(mod))\n except:\n print 'FAILURE to import ' + mod\n output_exception()\n\n return all_modules\n\n__all__.append('harvest_packages')\ndef harvest_packages(package,ignore = None):\n \"\"\" Retreive a list of all sub-packages that live within a package.\n\n Only retreive packages that are immediate children of this\n package -- do not recurse through child packages or\n directories. The returned list contains actual package objects, not\n just their names.\n \"\"\"\n join = os.path.join\n\n d,f = os.path.split(package.__file__)\n\n common_dir = os.path.abspath(d)\n all_files = os.listdir(d)\n\n all_files = remove_ignored_files(all_files,ignore,'')\n #print 'all_files:', all_files\n try:\n prefix = package.__name__\n except:\n prefix = ''\n all_packages = []\n for directory in all_files:\n path = join(common_dir,directory)\n if os.path.isdir(path) and \\\n os.path.exists(join(path,'__init__.py')):\n sub_package = prefix + '.' 
+ directory\n #print 'sub-package import ' + sub_package\n try:\n exec ('import ' + sub_package)\n all_packages.append(eval(sub_package))\n except:\n print 'FAILURE to import ' + sub_package\n output_exception()\n return all_packages\n\n__all__.append('harvest_modules_and_packages')\ndef harvest_modules_and_packages(package,ignore=None):\n \"\"\" Retreive list of all packages and modules that live within a package.\n\n See harvest_packages() and harvest_modules()\n \"\"\"\n all = harvest_modules(package,ignore) + harvest_packages(package,ignore)\n return all\n\n__all__.append('harvest_test_suites')\ndef harvest_test_suites(package,ignore = None,level=10):\n \"\"\"\n package -- the module to test. This is an actual module object\n (not a string)\n ignore -- a list of module names to omit from the tests\n level -- a value between 1 and 10. 1 will run the minimum number\n of tests. This is a fast \"smoke test\". Tests that take\n longer to run should have higher numbers ranging up to 10.\n \"\"\"\n suites=[]\n test_modules = harvest_modules_and_packages(package,ignore)\n #for i in test_modules:\n # print i.__name__\n for module in test_modules:\n if hasattr(module,'test_suite'):\n try:\n suite = module.test_suite(level=level)\n if suite:\n suites.append(suite)\n else:\n print \" !! FAILURE without error - shouldn't happen\",\n print module.__name__\n except:\n print ' !! 
FAILURE building test for ', module.__name__\n print ' ',\n output_exception()\n else:\n try:\n print 'No test suite found for ', module.__name__\n except AttributeError:\n # __version__.py getting replaced by a string throws a kink\n # in checking for modules, so we think is a module has\n # actually been overwritten\n print 'No test suite found for ', str(module)\n total_suite = unittest.TestSuite(suites)\n return total_suite\n\n__all__.append('module_test')\ndef module_test(mod_name,mod_file,level=10):\n \"\"\"*\n\n *\"\"\"\n #print 'testing', mod_name\n d,f = os.path.split(mod_file)\n\n # insert the tests directory to the python path\n test_dir = os.path.join(d,'tests')\n sys.path.insert(0,test_dir)\n\n # call the \"test_xxx.test()\" function for the appropriate\n # module.\n\n # This should deal with package naming issues correctly\n short_mod_name = string.split(mod_name,'.')[-1]\n test_module = 'test_' + short_mod_name\n test_string = 'import %s;reload(%s);%s.test(%d)' % \\\n ((test_module,)*3 + (level,))\n\n # This would be better cause it forces a reload of the orginal\n # module. 
It doesn't behave with packages however.\n #test_string = 'reload(%s);import %s;reload(%s);%s.test(%d)' % \\\n # ((mod_name,) + (test_module,)*3)\n exec(test_string)\n\n # remove test directory from python path.\n sys.path = sys.path[1:]\n\n__all__.append('module_test_suite')\ndef module_test_suite(mod_name,mod_file,level=10):\n #try:\n print ' creating test suite for:', mod_name\n d,f = os.path.split(mod_file)\n\n # insert the tests directory to the python path\n test_dir = os.path.join(d,'tests')\n sys.path.insert(0,test_dir)\n\n # call the \"test_xxx.test()\" function for the appropriate\n # module.\n\n # This should deal with package naming issues correctly\n short_mod_name = string.split(mod_name,'.')[-1]\n test_module = 'test_' + short_mod_name\n test_string = 'import %s;reload(%s);suite = %s.test_suite(%d)' % \\\n ((test_module,)*3+(level,))\n #print test_string\n exec(test_string)\n\n # remove test directory from python path.\n sys.path = sys.path[1:]\n return suite\n #except:\n # print ' !! FAILURE loading test suite from', test_module, ':'\n # print ' ',\n # output_exception()\n\n\n# Utility function to facilitate testing.\n\n__all__.append('assert_equal')\ndef assert_equal(actual,desired,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. I think this should be part of unittest.py\n \"\"\"\n if isinstance(actual, ArrayType) or isinstance(desired, ArrayType):\n return assert_array_equal(actual, desired, err_msg)\n msg = '\\nItems are not equal:\\n' + err_msg\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert desired == actual, msg\n\n__all__.append('assert_almost_equal')\ndef assert_almost_equal(actual,desired,decimal=7,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. 
I think this should be part of unittest.py\n \"\"\"\n if isinstance(actual, ArrayType) or isinstance(desired, ArrayType):\n return assert_array_almost_equal(actual, desired, decimal, err_msg)\n msg = '\\nItems are not equal:\\n' + err_msg\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert round(abs(desired - actual),decimal) == 0, msg\n\n__all__.append('assert_approx_equal')\ndef assert_approx_equal(actual,desired,significant=7,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. I think this should be part of unittest.py\n Approximately equal is defined as the number of significant digits\n correct\n \"\"\"\n msg = '\\nItems are not equal to %d significant digits:\\n' % significant\n msg += err_msg\n actual, desired = map(float, (actual, desired))\n # Normalized the numbers to be in range (-10.0,10.0)\n scale = pow(10,math.floor(math.log10(0.5*(abs(desired)+abs(actual)))))\n try:\n sc_desired = desired/scale\n except ZeroDivisionError:\n sc_desired = 0.0\n try:\n sc_actual = actual/scale\n except ZeroDivisionError:\n sc_actual = 0.0\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert math.fabs(sc_desired - sc_actual) < pow(10.,-1*significant), msg\n\n\n__all__.append('assert_array_equal')\ndef assert_array_equal(x,y,err_msg=''):\n x,y = asarray(x), asarray(y)\n msg = '\\nArrays are not equal'\n try:\n assert 0 in [len(shape(x)),len(shape(y))] \\\n or (len(shape(x))==len(shape(y)) and \\\n alltrue(equal(shape(x),shape(y)))),\\\n msg + ' (shapes %s, %s mismatch):\\n\\t' \\\n % (shape(x),shape(y)) + err_msg\n reduced = 
ravel(equal(x,y))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=16)\n s2 = array2string(y,precision=16)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' (mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n raise ValueError, msg\n\n__all__.append('assert_array_almost_equal')\ndef assert_array_almost_equal(x,y,decimal=6,err_msg=''):\n x = asarray(x)\n y = asarray(y)\n msg = '\\nArrays are not almost equal'\n try:\n cond = alltrue(equal(shape(x),shape(y)))\n if not cond:\n msg = msg + ' (shapes mismatch):\\n\\t'\\\n 'Shape of array 1: %s\\n\\tShape of array 2: %s' % (shape(x),shape(y))\n assert cond, msg + '\\n\\t' + err_msg\n reduced = ravel(equal(less_equal(around(abs(x-y),decimal),10.0**(-decimal)),1))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=decimal+1)\n s2 = array2string(y,precision=decimal+1)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' (mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n print sys.exc_value\n print shape(x),shape(y)\n print x, y\n raise ValueError, 'arrays are not almost equal'\n\n__all__.append('assert_array_less')\ndef assert_array_less(x,y,err_msg=''):\n x,y = asarray(x), asarray(y)\n msg = '\\nArrays are not less-ordered'\n try:\n assert alltrue(equal(shape(x),shape(y))),\\\n msg + ' (shapes mismatch):\\n\\t' + err_msg\n reduced = ravel(less(x,y))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=16)\n s2 = array2string(y,precision=16)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' 
(mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n print shape(x),shape(y)\n raise ValueError, 'arrays are not less-ordered'\n\n__all__.append('rand')\ndef rand(*args):\n \"\"\" Returns an array of random numbers with the given shape.\n used for testing\n \"\"\"\n import random\n results = zeros(args,Float64)\n f = results.flat\n for i in range(len(f)):\n f[i] = random.random()\n return results\n\ndef output_exception():\n try:\n type, value, tb = sys.exc_info()\n info = traceback.extract_tb(tb)\n #this is more verbose\n #traceback.print_exc()\n filename, lineno, function, text = info[-1] # last line only\n print \"%s:%d: %s: %s (in %s)\" %\\\n (filename, lineno, type.__name__, str(value), function)\n finally:\n type = value = tb = None # clean up\n\nfrom scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros,\\\n Float64, asarray, less_equal, array2string, less, ArrayType\n\ntry:\n from scipy_base.numerix import fastumath as math\nexcept ImportError,msg:\n print msg\n import math\n", "source_code_before": "\"\"\"\nUnit-testing\n------------\n\n ScipyTest -- Scipy tests site manager\n ScipyTestCase -- unittest.TestCase with measure method\n IgnoreException -- raise when checking disabled feature ('ignoring' is displayed)\n set_package_path -- prepend package build directory to path\n set_local_path -- prepend local directory (to tests files) to path\n restore_path -- restore path after set_package_path\n\nTiming tools\n------------\n\n jiffies -- return 1/100ths of a second that the current process has used\n memusage -- virtual memory size in bytes of the running python [linux]\n\nUtility functions\n-----------------\n\n assert_equal -- assert equality\n assert_almost_equal -- assert equality with decimal tolerance\n assert_approx_equal -- assert equality with significant digits tolerance\n assert_array_equal -- assert arrays equality\n assert_array_almost_equal -- 
assert arrays equality with decimal tolerance\n assert_array_less -- assert arrays less-ordering\n rand -- array of random numbers from given shape\n\n\"\"\"\n\n__all__ = []\n\nimport os,sys,time,glob,string,traceback,unittest\nimport types\nimport imp\n\n# These are used by Numeric tests.\n# If Numeric and scipy_base are not available, then some of the\n# functions below will not be available.\nfrom scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros, Float64, asarray\nfrom scipy_base.numerix import less_equal, array2string, less, ArrayType\n\nDEBUG = 0\n\n__all__.append('set_package_path')\ndef set_package_path(level=1):\n \"\"\" Prepend package directory to sys.path.\n\n set_package_path should be called from a test_file.py that\n satisfies the following tree structure:\n\n //test_file.py\n\n Then the first existing path name from the following list\n\n /build/lib.-\n /..\n\n is prepended to sys.path.\n The caller is responsible for removing this path by using\n\n restore_path()\n \"\"\"\n from distutils.util import get_platform\n from scipy_distutils.misc_util import get_frame\n f = get_frame(level)\n if f.f_locals['__name__']=='__main__':\n testfile = sys.argv[0]\n else:\n testfile = f.f_locals['__file__']\n d = os.path.dirname(os.path.dirname(os.path.abspath(testfile)))\n d1 = os.path.join(d,'build','lib.%s-%s'%(get_platform(),sys.version[:3]))\n if not os.path.isdir(d1):\n d1 = os.path.dirname(d)\n if DEBUG:\n print 'Inserting %r to sys.path' % (d1)\n sys.path.insert(0,d1)\n\n__all__.append('set_local_path')\ndef set_local_path(reldir='', level=1):\n \"\"\" Prepend local directory to sys.path.\n\n The caller is responsible for removing this path by using\n\n restore_path()\n \"\"\"\n from scipy_distutils.misc_util import get_frame\n f = get_frame(level)\n if f.f_locals['__name__']=='__main__':\n testfile = sys.argv[0]\n else:\n testfile = f.f_locals['__file__']\n local_path = os.path.join(os.path.dirname(os.path.abspath(testfile)),reldir)\n 
if DEBUG:\n print 'Inserting %r to sys.path' % (local_path)\n sys.path.insert(0,local_path)\n\n__all__.append('restore_path')\ndef restore_path():\n if DEBUG:\n print 'Removing %r from sys.path' % (sys.path[0])\n del sys.path[0]\n\n__all__.extend(['jiffies','memusage'])\nif sys.platform[:5]=='linux':\n def jiffies(_proc_pid_stat = '/proc/%s/stat'%(os.getpid()),\n _load_time=time.time()):\n \"\"\" Return number of jiffies (1/100ths of a second) that this\n process has been scheduled in user mode. See man 5 proc. \"\"\"\n try:\n f=open(_proc_pid_stat,'r')\n l = f.readline().split(' ')\n f.close()\n return int(l[13])\n except:\n return int(100*(time.time()-_load_time))\n\n def memusage(_proc_pid_stat = '/proc/%s/stat'%(os.getpid())):\n \"\"\" Return virtual memory size in bytes of the running python.\n \"\"\"\n try:\n f=open(_proc_pid_stat,'r')\n l = f.readline().split(' ')\n f.close()\n return int(l[22])\n except:\n return\nelse:\n # os.getpid is not in all platforms available.\n # Using time is safe but inaccurate, especially when process\n # was suspended or sleeping.\n def jiffies(_load_time=time.time()):\n \"\"\" Return number of jiffies (1/100ths of a second) that this\n process has been scheduled in user mode. [Emulation with time.time]. \"\"\"\n return int(100*(time.time()-_load_time))\n\n def memusage():\n \"\"\" Return memory usage of running python. 
[Not implemented]\"\"\"\n return\n\n__all__.append('ScipyTestCase')\nclass ScipyTestCase (unittest.TestCase):\n\n def measure(self,code_str,times=1):\n \"\"\" Return elapsed time for executing code_str in the\n namespace of the caller for given times.\n \"\"\"\n frame = sys._getframe(1)\n locs,globs = frame.f_locals,frame.f_globals\n code = compile(code_str,\n 'ScipyTestCase runner for '+self.__class__.__name__,\n 'exec')\n i = 0\n elapsed = jiffies()\n while i>> ScipyTest().test(level=1,verbosity=2)\n\n is package name or its module object.\n\n Package is supposed to contain a directory tests/\n with test_*.py files where * refers to the names of submodules.\n\n test_*.py files are supposed to define a classes, derived\n from ScipyTestCase or unittest.TestCase, with methods having\n names starting with test or bench or check.\n\n And that is it! No need to implement test or test_suite functions\n in each .py file.\n\n Also old styled test_suite(level=1) hooks are supported but\n soon to be removed.\n \"\"\"\n def __init__(self, package='__main__'):\n self.package = package\n\n def _module_str(self, module):\n filename = module.__file__[-30:]\n if filename!=module.__file__:\n filename = '...'+filename\n return '' % (`module.__name__`, `filename`)\n\n def _get_method_names(self,clsobj,level):\n names = []\n for mthname in _get_all_method_names(clsobj):\n if mthname[:5] not in ['bench','check'] \\\n and mthname[:4] not in ['test']:\n continue\n mth = getattr(clsobj, mthname)\n if type(mth) is not types.MethodType:\n continue\n d = mth.im_func.func_defaults\n if d is not None:\n mthlevel = d[0]\n else:\n mthlevel = 1\n if level>=mthlevel:\n if mthname not in names:\n names.append(mthname)\n for base in clsobj.__bases__:\n for n in self._get_method_names(base,level):\n if n not in names:\n names.append(n) \n return names\n\n def _get_module_tests(self,module,level):\n mstr = self._module_str\n d,f = os.path.split(module.__file__)\n\n short_module_name = 
os.path.splitext(os.path.basename(f))[0]\n if short_module_name=='__init__':\n short_module_name = module.__name__.split('.')[-1]\n\n test_dir = os.path.join(d,'tests')\n test_file = os.path.join(test_dir,'test_'+short_module_name+'.py')\n\n local_test_dir = os.path.join(os.getcwd(),'tests')\n local_test_file = os.path.join(local_test_dir,\n 'test_'+short_module_name+'.py')\n if os.path.basename(os.path.dirname(local_test_dir)) \\\n == os.path.basename(os.path.dirname(test_dir)) \\\n and os.path.isfile(local_test_file):\n test_file = local_test_file\n\n if not os.path.isfile(test_file):\n if short_module_name[:5]=='info_' \\\n and short_module_name[5:]==module.__name__.split('.')[-2]:\n return []\n if short_module_name in ['__cvs_version__','__svn_version__']:\n return []\n if short_module_name[-8:]=='_version' \\\n and short_module_name[:-8]==module.__name__.split('.')[-2]:\n return []\n print ' !! No test file %r found for %s' \\\n % (os.path.basename(test_file), mstr(module))\n return []\n\n try:\n if sys.version[:3]=='2.1':\n # Workaround for Python 2.1 .pyc file generator bug\n import random\n pref = '-nopyc'+`random.randint(1,100)`\n else:\n pref = ''\n f = open(test_file,'r')\n test_module = imp.load_module(\\\n module.__name__+'.test_'+short_module_name+pref,\n f, test_file+pref,('.py', 'r', 1))\n f.close()\n if sys.version[:3]=='2.1' and os.path.isfile(test_file+pref+'c'):\n os.remove(test_file+pref+'c')\n except:\n print ' !! FAILURE importing tests for ', mstr(module)\n print ' ',\n output_exception()\n return []\n return self._get_suite_list(test_module, level, module.__name__)\n\n def _get_suite_list(self, test_module, level, module_name='__main__'):\n mstr = self._module_str\n if hasattr(test_module,'test_suite'):\n # Using old styled test suite\n try:\n total_suite = test_module.test_suite(level)\n return total_suite._tests\n except:\n print ' !! 
FAILURE building tests for ', mstr(test_module)\n print ' ',\n output_exception()\n return []\n suite_list = []\n for name in dir(test_module):\n obj = getattr(test_module, name)\n if type(obj) is not type(unittest.TestCase) \\\n or not issubclass(obj, unittest.TestCase) \\\n or obj.__name__[:4] != 'test':\n continue\n for mthname in self._get_method_names(obj,level):\n suite = obj(mthname)\n if getattr(suite,'isrunnable',lambda mthname:1)(mthname):\n suite_list.append(suite)\n print ' Found',len(suite_list),'tests for',module_name\n return suite_list\n\n def _touch_ppimported(self, module):\n from scipy_base.ppimport import _ModuleLoader\n if os.path.isdir(os.path.join(os.path.dirname(module.__file__),'tests')):\n # only touching those modules that have tests/ directory\n try: module._pliuh_plauh\n except AttributeError: pass\n for name in dir(module):\n obj = getattr(module,name)\n if isinstance(obj,_ModuleLoader) \\\n and not hasattr(obj,'_ppimport_module') \\\n and not hasattr(obj,'_ppimport_exc_info'):\n self._touch_ppimported(obj)\n\n def test(self,level=1,verbosity=1):\n \"\"\" Run Scipy module test suite with level and verbosity.\n \"\"\"\n if type(self.package) is type(''):\n exec 'import %s as this_package' % (self.package)\n else:\n this_package = self.package\n\n self._touch_ppimported(this_package)\n\n package_name = this_package.__name__\n\n suites = []\n for name, module in sys.modules.items():\n if package_name != name[:len(package_name)] \\\n or module is None \\\n or os.path.basename(os.path.dirname(module.__file__))=='tests':\n continue\n suites.extend(self._get_module_tests(module, level))\n\n suites.extend(self._get_suite_list(sys.modules[package_name], level))\n\n all_tests = unittest.TestSuite(suites)\n runner = unittest.TextTestRunner(verbosity=verbosity)\n runner.run(all_tests)\n return runner\n\n def run(self):\n \"\"\" Run Scipy module test suite with level and verbosity\n taken from sys.argv. 
Requires optparse module.\n \"\"\"\n try:\n from optparse import OptionParser\n except ImportError:\n print 'Failed to import optparse module, ignoring.'\n return self.test()\n usage = r'usage: %prog [-v ] [-l ]'\n parser = OptionParser(usage)\n parser.add_option(\"-v\", \"--verbosity\",\n action=\"store\",\n dest=\"verbosity\",\n default=1,\n type='int')\n parser.add_option(\"-l\", \"--level\",\n action=\"store\",\n dest=\"level\",\n default=1,\n type='int')\n (options, args) = parser.parse_args()\n self.test(options.level,options.verbosity)\n\n#------------\n \ndef remove_ignored_patterns(files,pattern):\n from fnmatch import fnmatch\n good_files = []\n for file in files:\n if not fnmatch(file,pattern):\n good_files.append(file)\n return good_files\n\ndef remove_ignored_files(original,ignored_files,cur_dir):\n \"\"\" This is actually expanded to do pattern matching.\n\n \"\"\"\n if not ignored_files: ignored_files = []\n ignored_modules = map(lambda x: x+'.py',ignored_files)\n ignored_packages = ignored_files[:]\n # always ignore setup.py and __init__.py files\n ignored_files = ['setup.py','setup_*.py','__init__.py']\n ignored_files += ignored_modules + ignored_packages\n ignored_files = map(lambda x,cur_dir=cur_dir: os.path.join(cur_dir,x),\n ignored_files)\n #print 'ignored:', ignored_files\n #good_files = filter(lambda x,ignored = ignored_files: x not in ignored,\n # original)\n good_files = original\n for pattern in ignored_files:\n good_files = remove_ignored_patterns(good_files,pattern)\n\n return good_files\n\n__all__.append('harvest_modules')\ndef harvest_modules(package,ignore=None):\n \"\"\"* Retreive a list of all modules that live within a package.\n\n Only retreive files that are immediate children of the\n package -- do not recurse through child packages or\n directories. 
The returned list contains actual modules, not\n just their names.\n *\"\"\"\n d,f = os.path.split(package.__file__)\n\n # go through the directory and import every py file there.\n common_dir = os.path.join(d,'*.py')\n py_files = glob.glob(common_dir)\n #py_files.remove(os.path.join(d,'__init__.py'))\n #py_files.remove(os.path.join(d,'setup.py'))\n\n py_files = remove_ignored_files(py_files,ignore,d)\n #print 'py_files:', py_files\n try:\n prefix = package.__name__\n except:\n prefix = ''\n\n all_modules = []\n for file in py_files:\n d,f = os.path.split(file)\n base,ext = os.path.splitext(f)\n mod = prefix + '.' + base\n #print 'module: import ' + mod\n try:\n exec ('import ' + mod)\n all_modules.append(eval(mod))\n except:\n print 'FAILURE to import ' + mod\n output_exception()\n\n return all_modules\n\n__all__.append('harvest_packages')\ndef harvest_packages(package,ignore = None):\n \"\"\" Retreive a list of all sub-packages that live within a package.\n\n Only retreive packages that are immediate children of this\n package -- do not recurse through child packages or\n directories. The returned list contains actual package objects, not\n just their names.\n \"\"\"\n join = os.path.join\n\n d,f = os.path.split(package.__file__)\n\n common_dir = os.path.abspath(d)\n all_files = os.listdir(d)\n\n all_files = remove_ignored_files(all_files,ignore,'')\n #print 'all_files:', all_files\n try:\n prefix = package.__name__\n except:\n prefix = ''\n all_packages = []\n for directory in all_files:\n path = join(common_dir,directory)\n if os.path.isdir(path) and \\\n os.path.exists(join(path,'__init__.py')):\n sub_package = prefix + '.' 
+ directory\n #print 'sub-package import ' + sub_package\n try:\n exec ('import ' + sub_package)\n all_packages.append(eval(sub_package))\n except:\n print 'FAILURE to import ' + sub_package\n output_exception()\n return all_packages\n\n__all__.append('harvest_modules_and_packages')\ndef harvest_modules_and_packages(package,ignore=None):\n \"\"\" Retreive list of all packages and modules that live within a package.\n\n See harvest_packages() and harvest_modules()\n \"\"\"\n all = harvest_modules(package,ignore) + harvest_packages(package,ignore)\n return all\n\n__all__.append('harvest_test_suites')\ndef harvest_test_suites(package,ignore = None,level=10):\n \"\"\"\n package -- the module to test. This is an actual module object\n (not a string)\n ignore -- a list of module names to omit from the tests\n level -- a value between 1 and 10. 1 will run the minimum number\n of tests. This is a fast \"smoke test\". Tests that take\n longer to run should have higher numbers ranging up to 10.\n \"\"\"\n suites=[]\n test_modules = harvest_modules_and_packages(package,ignore)\n #for i in test_modules:\n # print i.__name__\n for module in test_modules:\n if hasattr(module,'test_suite'):\n try:\n suite = module.test_suite(level=level)\n if suite:\n suites.append(suite)\n else:\n print \" !! FAILURE without error - shouldn't happen\",\n print module.__name__\n except:\n print ' !! 
FAILURE building test for ', module.__name__\n print ' ',\n output_exception()\n else:\n try:\n print 'No test suite found for ', module.__name__\n except AttributeError:\n # __version__.py getting replaced by a string throws a kink\n # in checking for modules, so we think is a module has\n # actually been overwritten\n print 'No test suite found for ', str(module)\n total_suite = unittest.TestSuite(suites)\n return total_suite\n\n__all__.append('module_test')\ndef module_test(mod_name,mod_file,level=10):\n \"\"\"*\n\n *\"\"\"\n #print 'testing', mod_name\n d,f = os.path.split(mod_file)\n\n # insert the tests directory to the python path\n test_dir = os.path.join(d,'tests')\n sys.path.insert(0,test_dir)\n\n # call the \"test_xxx.test()\" function for the appropriate\n # module.\n\n # This should deal with package naming issues correctly\n short_mod_name = string.split(mod_name,'.')[-1]\n test_module = 'test_' + short_mod_name\n test_string = 'import %s;reload(%s);%s.test(%d)' % \\\n ((test_module,)*3 + (level,))\n\n # This would be better cause it forces a reload of the orginal\n # module. 
It doesn't behave with packages however.\n #test_string = 'reload(%s);import %s;reload(%s);%s.test(%d)' % \\\n # ((mod_name,) + (test_module,)*3)\n exec(test_string)\n\n # remove test directory from python path.\n sys.path = sys.path[1:]\n\n__all__.append('module_test_suite')\ndef module_test_suite(mod_name,mod_file,level=10):\n #try:\n print ' creating test suite for:', mod_name\n d,f = os.path.split(mod_file)\n\n # insert the tests directory to the python path\n test_dir = os.path.join(d,'tests')\n sys.path.insert(0,test_dir)\n\n # call the \"test_xxx.test()\" function for the appropriate\n # module.\n\n # This should deal with package naming issues correctly\n short_mod_name = string.split(mod_name,'.')[-1]\n test_module = 'test_' + short_mod_name\n test_string = 'import %s;reload(%s);suite = %s.test_suite(%d)' % \\\n ((test_module,)*3+(level,))\n #print test_string\n exec(test_string)\n\n # remove test directory from python path.\n sys.path = sys.path[1:]\n return suite\n #except:\n # print ' !! FAILURE loading test suite from', test_module, ':'\n # print ' ',\n # output_exception()\n\n\n# Utility function to facilitate testing.\n\n__all__.append('assert_equal')\ndef assert_equal(actual,desired,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. I think this should be part of unittest.py\n \"\"\"\n if isinstance(actual, ArrayType) or isinstance(desired, ArrayType):\n return assert_array_equal(actual, desired, err_msg)\n msg = '\\nItems are not equal:\\n' + err_msg\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert desired == actual, msg\n\n__all__.append('assert_almost_equal')\ndef assert_almost_equal(actual,desired,decimal=7,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. 
I think this should be part of unittest.py\n \"\"\"\n if isinstance(actual, ArrayType) or isinstance(desired, ArrayType):\n return assert_array_almost_equal(actual, desired, decimal, err_msg)\n msg = '\\nItems are not equal:\\n' + err_msg\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert round(abs(desired - actual),decimal) == 0, msg\n\n__all__.append('assert_approx_equal')\ndef assert_approx_equal(actual,desired,significant=7,err_msg='',verbose=1):\n \"\"\" Raise an assertion if two items are not\n equal. I think this should be part of unittest.py\n Approximately equal is defined as the number of significant digits\n correct\n \"\"\"\n msg = '\\nItems are not equal to %d significant digits:\\n' % significant\n msg += err_msg\n actual, desired = map(float, (actual, desired))\n # Normalized the numbers to be in range (-10.0,10.0)\n scale = pow(10,math.floor(math.log10(0.5*(abs(desired)+abs(actual)))))\n try:\n sc_desired = desired/scale\n except ZeroDivisionError:\n sc_desired = 0.0\n try:\n sc_actual = actual/scale\n except ZeroDivisionError:\n sc_actual = 0.0\n try:\n if ( verbose and len(repr(desired)) < 100 and len(repr(actual)) ):\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n except:\n msg = msg \\\n + 'DESIRED: ' + repr(desired) \\\n + '\\nACTUAL: ' + repr(actual)\n assert math.fabs(sc_desired - sc_actual) < pow(10.,-1*significant), msg\n\n\n__all__.append('assert_array_equal')\ndef assert_array_equal(x,y,err_msg=''):\n x,y = asarray(x), asarray(y)\n msg = '\\nArrays are not equal'\n try:\n assert 0 in [len(shape(x)),len(shape(y))] \\\n or (len(shape(x))==len(shape(y)) and \\\n alltrue(equal(shape(x),shape(y)))),\\\n msg + ' (shapes %s, %s mismatch):\\n\\t' \\\n % (shape(x),shape(y)) + err_msg\n reduced = 
ravel(equal(x,y))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=16)\n s2 = array2string(y,precision=16)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' (mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n raise ValueError, msg\n\n__all__.append('assert_array_almost_equal')\ndef assert_array_almost_equal(x,y,decimal=6,err_msg=''):\n x = asarray(x)\n y = asarray(y)\n msg = '\\nArrays are not almost equal'\n try:\n cond = alltrue(equal(shape(x),shape(y)))\n if not cond:\n msg = msg + ' (shapes mismatch):\\n\\t'\\\n 'Shape of array 1: %s\\n\\tShape of array 2: %s' % (shape(x),shape(y))\n assert cond, msg + '\\n\\t' + err_msg\n reduced = ravel(equal(less_equal(around(abs(x-y),decimal),10.0**(-decimal)),1))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=decimal+1)\n s2 = array2string(y,precision=decimal+1)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' (mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n print sys.exc_value\n print shape(x),shape(y)\n print x, y\n raise ValueError, 'arrays are not almost equal'\n\n__all__.append('assert_array_less')\ndef assert_array_less(x,y,err_msg=''):\n x,y = asarray(x), asarray(y)\n msg = '\\nArrays are not less-ordered'\n try:\n assert alltrue(equal(shape(x),shape(y))),\\\n msg + ' (shapes mismatch):\\n\\t' + err_msg\n reduced = ravel(less(x,y))\n cond = alltrue(reduced)\n if not cond:\n s1 = array2string(x,precision=16)\n s2 = array2string(y,precision=16)\n if len(s1)>120: s1 = s1[:120] + '...'\n if len(s2)>120: s2 = s2[:120] + '...'\n match = 100-100.0*reduced.tolist().count(1)/len(reduced)\n msg = msg + ' 
(mismatch %s%%):\\n\\tArray 1: %s\\n\\tArray 2: %s' % (match,s1,s2)\n assert cond,\\\n msg + '\\n\\t' + err_msg\n except ValueError:\n print shape(x),shape(y)\n raise ValueError, 'arrays are not less-ordered'\n\n__all__.append('rand')\ndef rand(*args):\n \"\"\" Returns an array of random numbers with the given shape.\n used for testing\n \"\"\"\n import random\n results = zeros(args,Float64)\n f = results.flat\n for i in range(len(f)):\n f[i] = random.random()\n return results\n\ndef output_exception():\n try:\n type, value, tb = sys.exc_info()\n info = traceback.extract_tb(tb)\n #this is more verbose\n #traceback.print_exc()\n filename, lineno, function, text = info[-1] # last line only\n print \"%s:%d: %s: %s (in %s)\" %\\\n (filename, lineno, type.__name__, str(value), function)\n finally:\n type = value = tb = None # clean up\n\ntry:\n from scipy_base.numerix import fastumath as math\nexcept ImportError,msg:\n print msg\n import math\n", "methods": [ { "name": "set_package_path", "long_name": "set_package_path( level = 1 )", "filename": "testing.py", "nloc": 15, "complexity": 4, "token_count": 146, "parameters": [ "level" ], "start_line": 44, "end_line": 75, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 0 }, { "name": "set_local_path", "long_name": "set_local_path( reldir = '' , level = 1 )", "filename": "testing.py", "nloc": 11, "complexity": 3, "token_count": 97, "parameters": [ "reldir", "level" ], "start_line": 78, "end_line": 94, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "restore_path", "long_name": "restore_path( )", "filename": "testing.py", "nloc": 4, "complexity": 2, "token_count": 25, "parameters": [], "start_line": 97, "end_line": 100, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "jiffies", "long_name": "jiffies( _proc_pid_stat = '/proc/%s/stat' % ( os . 
getpid ( )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "_proc_pid_stat" ], "start_line": 104, "end_line": 105, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "memusage", "long_name": "memusage( _proc_pid_stat = '/proc/%s/stat' % ( os . getpid ( )", "filename": "testing.py", "nloc": 10, "complexity": 2, "token_count": 54, "parameters": [ "_proc_pid_stat" ], "start_line": 116, "end_line": 125, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "jiffies", "long_name": "jiffies( _load_time = time . time ( )", "filename": "testing.py", "nloc": 4, "complexity": 1, "token_count": 27, "parameters": [ "_load_time" ], "start_line": 130, "end_line": 133, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "memusage", "long_name": "memusage( )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 6, "parameters": [], "start_line": 135, "end_line": 137, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "measure", "long_name": "measure( self , code_str , times = 1 )", "filename": "testing.py", "nloc": 13, "complexity": 2, "token_count": 82, "parameters": [ "self", "code_str", "times" ], "start_line": 142, "end_line": 157, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 1 }, { "name": "__call__", "long_name": "__call__( self , result = None )", "filename": "testing.py", "nloc": 24, "complexity": 6, "token_count": 224, "parameters": [ "self", "result" ], "start_line": 159, "end_line": 183, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self , stream )", "filename": "testing.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "self", "stream" ], "start_line": 186, "end_line": 188, "fan_in": 
0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "write", "long_name": "write( self , message )", "filename": "testing.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "self", "message" ], "start_line": 189, "end_line": 194, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "writeln", "long_name": "writeln( self , message )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 15, "parameters": [ "self", "message" ], "start_line": 195, "end_line": 196, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_get_all_method_names", "long_name": "_get_all_method_names( cls )", "filename": "testing.py", "nloc": 8, "complexity": 5, "token_count": 56, "parameters": [ "cls" ], "start_line": 204, "end_line": 211, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , package = '__main__' )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "self", "package" ], "start_line": 235, "end_line": 236, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_module_str", "long_name": "_module_str( self , module )", "filename": "testing.py", "nloc": 5, "complexity": 2, "token_count": 43, "parameters": [ "self", "module" ], "start_line": 238, "end_line": 242, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "_get_method_names", "long_name": "_get_method_names( self , clsobj , level )", "filename": "testing.py", "nloc": 22, "complexity": 11, "token_count": 142, "parameters": [ "self", "clsobj", "level" ], "start_line": 244, "end_line": 265, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "_get_module_tests", "long_name": "_get_module_tests( self , module 
, level )", "filename": "testing.py", "nloc": 46, "complexity": 14, "token_count": 428, "parameters": [ "self", "module", "level" ], "start_line": 267, "end_line": 318, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 52, "top_nesting_level": 1 }, { "name": "_get_suite_list", "long_name": "_get_suite_list( self , test_module , level , module_name = '__main__' )", "filename": "testing.py", "nloc": 24, "complexity": 9, "token_count": 168, "parameters": [ "self", "test_module", "level", "module_name" ], "start_line": 320, "end_line": 344, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 1 }, { "name": "_touch_ppimported", "long_name": "_touch_ppimported( self , module )", "filename": "testing.py", "nloc": 11, "complexity": 7, "token_count": 98, "parameters": [ "self", "module" ], "start_line": 346, "end_line": 357, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "test", "long_name": "test( self , level = 1 , verbosity = 1 )", "filename": "testing.py", "nloc": 19, "complexity": 6, "token_count": 166, "parameters": [ "self", "level", "verbosity" ], "start_line": 359, "end_line": 384, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "testing.py", "nloc": 20, "complexity": 2, "token_count": 104, "parameters": [ "self" ], "start_line": 386, "end_line": 408, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "remove_ignored_patterns", "long_name": "remove_ignored_patterns( files , pattern )", "filename": "testing.py", "nloc": 7, "complexity": 3, "token_count": 37, "parameters": [ "files", "pattern" ], "start_line": 412, "end_line": 418, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "remove_ignored_files", "long_name": "remove_ignored_files( original , ignored_files , cur_dir )", 
"filename": "testing.py", "nloc": 12, "complexity": 3, "token_count": 93, "parameters": [ "original", "ignored_files", "cur_dir" ], "start_line": 420, "end_line": 439, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "harvest_modules", "long_name": "harvest_modules( package , ignore = None )", "filename": "testing.py", "nloc": 21, "complexity": 4, "token_count": 134, "parameters": [ "package", "ignore" ], "start_line": 442, "end_line": 478, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 37, "top_nesting_level": 0 }, { "name": "harvest_packages", "long_name": "harvest_packages( package , ignore = None )", "filename": "testing.py", "nloc": 23, "complexity": 6, "token_count": 148, "parameters": [ "package", "ignore" ], "start_line": 481, "end_line": 515, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 0 }, { "name": "harvest_modules_and_packages", "long_name": "harvest_modules_and_packages( package , ignore = None )", "filename": "testing.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "package", "ignore" ], "start_line": 518, "end_line": 524, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "harvest_test_suites", "long_name": "harvest_test_suites( package , ignore = None , level = 10 )", "filename": "testing.py", "nloc": 23, "complexity": 6, "token_count": 113, "parameters": [ "package", "ignore", "level" ], "start_line": 527, "end_line": 562, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 0 }, { "name": "module_test", "long_name": "module_test( mod_name , mod_file , level = 10 )", "filename": "testing.py", "nloc": 10, "complexity": 1, "token_count": 98, "parameters": [ "mod_name", "mod_file", "level" ], "start_line": 565, "end_line": 592, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 28, "top_nesting_level": 0 }, { "name": "module_test_suite", 
"long_name": "module_test_suite( mod_name , mod_file , level = 10 )", "filename": "testing.py", "nloc": 12, "complexity": 1, "token_count": 103, "parameters": [ "mod_name", "mod_file", "level" ], "start_line": 595, "end_line": 617, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 0 }, { "name": "assert_equal", "long_name": "assert_equal( actual , desired , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 14, "complexity": 7, "token_count": 116, "parameters": [ "actual", "desired", "err_msg", "verbose" ], "start_line": 627, "end_line": 643, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "assert_almost_equal", "long_name": "assert_almost_equal( actual , desired , decimal = 7 , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 14, "complexity": 7, "token_count": 132, "parameters": [ "actual", "desired", "decimal", "err_msg", "verbose" ], "start_line": 646, "end_line": 662, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "assert_approx_equal", "long_name": "assert_approx_equal( actual , desired , significant = 7 , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 23, "complexity": 7, "token_count": 191, "parameters": [ "actual", "desired", "significant", "err_msg", "verbose" ], "start_line": 665, "end_line": 693, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 0 }, { "name": "assert_array_equal", "long_name": "assert_array_equal( x , y , err_msg = '' )", "filename": "testing.py", "nloc": 22, "complexity": 7, "token_count": 232, "parameters": [ "x", "y", "err_msg" ], "start_line": 697, "end_line": 718, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 0 }, { "name": "assert_array_almost_equal", "long_name": "assert_array_almost_equal( x , y , decimal = 6 , err_msg = '' )", "filename": "testing.py", "nloc": 26, 
"complexity": 6, "token_count": 251, "parameters": [ "x", "y", "decimal", "err_msg" ], "start_line": 721, "end_line": 746, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 0 }, { "name": "assert_array_less", "long_name": "assert_array_less( x , y , err_msg = '' )", "filename": "testing.py", "nloc": 20, "complexity": 5, "token_count": 189, "parameters": [ "x", "y", "err_msg" ], "start_line": 749, "end_line": 768, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "rand", "long_name": "rand( * args )", "filename": "testing.py", "nloc": 7, "complexity": 2, "token_count": 45, "parameters": [ "args" ], "start_line": 771, "end_line": 780, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "output_exception", "long_name": "output_exception( )", "filename": "testing.py", "nloc": 9, "complexity": 2, "token_count": 67, "parameters": [], "start_line": 782, "end_line": 792, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 } ], "methods_before": [ { "name": "set_package_path", "long_name": "set_package_path( level = 1 )", "filename": "testing.py", "nloc": 15, "complexity": 4, "token_count": 146, "parameters": [ "level" ], "start_line": 46, "end_line": 77, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 0 }, { "name": "set_local_path", "long_name": "set_local_path( reldir = '' , level = 1 )", "filename": "testing.py", "nloc": 11, "complexity": 3, "token_count": 97, "parameters": [ "reldir", "level" ], "start_line": 80, "end_line": 96, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "restore_path", "long_name": "restore_path( )", "filename": "testing.py", "nloc": 4, "complexity": 2, "token_count": 25, "parameters": [], "start_line": 99, "end_line": 102, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, 
"top_nesting_level": 0 }, { "name": "jiffies", "long_name": "jiffies( _proc_pid_stat = '/proc/%s/stat' % ( os . getpid ( )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "_proc_pid_stat" ], "start_line": 106, "end_line": 107, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "memusage", "long_name": "memusage( _proc_pid_stat = '/proc/%s/stat' % ( os . getpid ( )", "filename": "testing.py", "nloc": 10, "complexity": 2, "token_count": 54, "parameters": [ "_proc_pid_stat" ], "start_line": 118, "end_line": 127, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "jiffies", "long_name": "jiffies( _load_time = time . time ( )", "filename": "testing.py", "nloc": 4, "complexity": 1, "token_count": 27, "parameters": [ "_load_time" ], "start_line": 132, "end_line": 135, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "memusage", "long_name": "memusage( )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 6, "parameters": [], "start_line": 137, "end_line": 139, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "measure", "long_name": "measure( self , code_str , times = 1 )", "filename": "testing.py", "nloc": 13, "complexity": 2, "token_count": 82, "parameters": [ "self", "code_str", "times" ], "start_line": 144, "end_line": 159, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 1 }, { "name": "__call__", "long_name": "__call__( self , result = None )", "filename": "testing.py", "nloc": 24, "complexity": 6, "token_count": 224, "parameters": [ "self", "result" ], "start_line": 161, "end_line": 185, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self , stream )", "filename": "testing.py", "nloc": 3, 
"complexity": 1, "token_count": 18, "parameters": [ "self", "stream" ], "start_line": 188, "end_line": 190, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "write", "long_name": "write( self , message )", "filename": "testing.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "self", "message" ], "start_line": 191, "end_line": 196, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "writeln", "long_name": "writeln( self , message )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 15, "parameters": [ "self", "message" ], "start_line": 197, "end_line": 198, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_get_all_method_names", "long_name": "_get_all_method_names( cls )", "filename": "testing.py", "nloc": 8, "complexity": 5, "token_count": 56, "parameters": [ "cls" ], "start_line": 206, "end_line": 213, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , package = '__main__' )", "filename": "testing.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "self", "package" ], "start_line": 237, "end_line": 238, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_module_str", "long_name": "_module_str( self , module )", "filename": "testing.py", "nloc": 5, "complexity": 2, "token_count": 43, "parameters": [ "self", "module" ], "start_line": 240, "end_line": 244, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "_get_method_names", "long_name": "_get_method_names( self , clsobj , level )", "filename": "testing.py", "nloc": 22, "complexity": 11, "token_count": 142, "parameters": [ "self", "clsobj", "level" ], "start_line": 246, "end_line": 267, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, 
"length": 22, "top_nesting_level": 1 }, { "name": "_get_module_tests", "long_name": "_get_module_tests( self , module , level )", "filename": "testing.py", "nloc": 46, "complexity": 14, "token_count": 428, "parameters": [ "self", "module", "level" ], "start_line": 269, "end_line": 320, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 52, "top_nesting_level": 1 }, { "name": "_get_suite_list", "long_name": "_get_suite_list( self , test_module , level , module_name = '__main__' )", "filename": "testing.py", "nloc": 24, "complexity": 9, "token_count": 168, "parameters": [ "self", "test_module", "level", "module_name" ], "start_line": 322, "end_line": 346, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 1 }, { "name": "_touch_ppimported", "long_name": "_touch_ppimported( self , module )", "filename": "testing.py", "nloc": 11, "complexity": 7, "token_count": 98, "parameters": [ "self", "module" ], "start_line": 348, "end_line": 359, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "test", "long_name": "test( self , level = 1 , verbosity = 1 )", "filename": "testing.py", "nloc": 19, "complexity": 6, "token_count": 166, "parameters": [ "self", "level", "verbosity" ], "start_line": 361, "end_line": 386, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "testing.py", "nloc": 20, "complexity": 2, "token_count": 104, "parameters": [ "self" ], "start_line": 388, "end_line": 410, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "remove_ignored_patterns", "long_name": "remove_ignored_patterns( files , pattern )", "filename": "testing.py", "nloc": 7, "complexity": 3, "token_count": 37, "parameters": [ "files", "pattern" ], "start_line": 414, "end_line": 420, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 
}, { "name": "remove_ignored_files", "long_name": "remove_ignored_files( original , ignored_files , cur_dir )", "filename": "testing.py", "nloc": 12, "complexity": 3, "token_count": 93, "parameters": [ "original", "ignored_files", "cur_dir" ], "start_line": 422, "end_line": 441, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "harvest_modules", "long_name": "harvest_modules( package , ignore = None )", "filename": "testing.py", "nloc": 21, "complexity": 4, "token_count": 134, "parameters": [ "package", "ignore" ], "start_line": 444, "end_line": 480, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 37, "top_nesting_level": 0 }, { "name": "harvest_packages", "long_name": "harvest_packages( package , ignore = None )", "filename": "testing.py", "nloc": 23, "complexity": 6, "token_count": 148, "parameters": [ "package", "ignore" ], "start_line": 483, "end_line": 517, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 0 }, { "name": "harvest_modules_and_packages", "long_name": "harvest_modules_and_packages( package , ignore = None )", "filename": "testing.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "package", "ignore" ], "start_line": 520, "end_line": 526, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "harvest_test_suites", "long_name": "harvest_test_suites( package , ignore = None , level = 10 )", "filename": "testing.py", "nloc": 23, "complexity": 6, "token_count": 113, "parameters": [ "package", "ignore", "level" ], "start_line": 529, "end_line": 564, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 0 }, { "name": "module_test", "long_name": "module_test( mod_name , mod_file , level = 10 )", "filename": "testing.py", "nloc": 10, "complexity": 1, "token_count": 98, "parameters": [ "mod_name", "mod_file", "level" ], "start_line": 567, "end_line": 594, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 28, "top_nesting_level": 0 }, { "name": "module_test_suite", "long_name": "module_test_suite( mod_name , mod_file , level = 10 )", "filename": "testing.py", "nloc": 12, "complexity": 1, "token_count": 103, "parameters": [ "mod_name", "mod_file", "level" ], "start_line": 597, "end_line": 619, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 0 }, { "name": "assert_equal", "long_name": "assert_equal( actual , desired , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 14, "complexity": 7, "token_count": 116, "parameters": [ "actual", "desired", "err_msg", "verbose" ], "start_line": 629, "end_line": 645, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "assert_almost_equal", "long_name": "assert_almost_equal( actual , desired , decimal = 7 , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 14, "complexity": 7, "token_count": 132, "parameters": [ "actual", "desired", "decimal", "err_msg", "verbose" ], "start_line": 648, "end_line": 664, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "assert_approx_equal", "long_name": "assert_approx_equal( actual , desired , significant = 7 , err_msg = '' , verbose = 1 )", "filename": "testing.py", "nloc": 23, "complexity": 7, "token_count": 191, "parameters": [ "actual", "desired", "significant", "err_msg", "verbose" ], "start_line": 667, "end_line": 695, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 0 }, { "name": "assert_array_equal", "long_name": "assert_array_equal( x , y , err_msg = '' )", "filename": "testing.py", "nloc": 22, "complexity": 7, "token_count": 232, "parameters": [ "x", "y", "err_msg" ], "start_line": 699, "end_line": 720, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 0 }, { "name": "assert_array_almost_equal", "long_name": 
"assert_array_almost_equal( x , y , decimal = 6 , err_msg = '' )", "filename": "testing.py", "nloc": 26, "complexity": 6, "token_count": 251, "parameters": [ "x", "y", "decimal", "err_msg" ], "start_line": 723, "end_line": 748, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 0 }, { "name": "assert_array_less", "long_name": "assert_array_less( x , y , err_msg = '' )", "filename": "testing.py", "nloc": 20, "complexity": 5, "token_count": 189, "parameters": [ "x", "y", "err_msg" ], "start_line": 751, "end_line": 770, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "rand", "long_name": "rand( * args )", "filename": "testing.py", "nloc": 7, "complexity": 2, "token_count": 45, "parameters": [ "args" ], "start_line": 773, "end_line": 782, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "output_exception", "long_name": "output_exception( )", "filename": "testing.py", "nloc": 9, "complexity": 2, "token_count": 67, "parameters": [], "start_line": 784, "end_line": 794, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 } ], "changed_methods": [], "nloc": 596, "complexity": 152, "token_count": 4282, "diff_parsed": { "added": [ "#", "# Imports from scipy_base must be done at the end of this file.", "#", "from scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros,\\", " Float64, asarray, less_equal, array2string, less, ArrayType", "" ], "deleted": [ "# These are used by Numeric tests.", "# If Numeric and scipy_base are not available, then some of the", "# functions below will not be available.", "from scipy_base.numerix import alltrue, equal, shape, ravel, around, zeros, Float64, asarray", "from scipy_base.numerix import less_equal, array2string, less, ArrayType" ] } } ] }, { "hash": "a2d94a2384f0acc84d330d1f19c0415e46d88a58", "msg": "Added scipy_base/_compiled_base.c since it is used at build 
time but\nnot included literally in any Extension, only as a renamed copy:\ne.g. _na_compiled_base.c.", "author": { "name": "jmiller", "email": "jmiller@localhost" }, "committer": { "name": "jmiller", "email": "jmiller@localhost" }, "author_date": "2005-01-28T15:09:13+00:00", "author_timezone": 0, "committer_date": "2005-01-28T15:09:13+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "de989a16fbc7aa77285b92aba139c381941658b2" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 0, "insertions": 1, "lines": 1, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "MANIFEST.in", "new_path": "MANIFEST.in", "filename": "MANIFEST.in", "extension": "in", "change_type": "MODIFY", "diff": "@@ -0,0 +1 @@\n+include scipy_base/_compiled_base.c\n", "added_lines": 1, "deleted_lines": 0, "source_code": "include scipy_base/_compiled_base.c\n", "source_code_before": null, "methods": [], "methods_before": [], "changed_methods": [], "nloc": null, "complexity": null, "token_count": null, "diff_parsed": { "added": [ "include scipy_base/_compiled_base.c" ], "deleted": [] } } ] }, { "hash": "94d664637f86a02327efa902bc29b82dec58679c", "msg": "Fixed bdist_rpm command.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-01-28T16:32:20+00:00", "author_timezone": 0, "committer_date": "2005-01-28T16:32:20+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "a2d94a2384f0acc84d330d1f19c0415e46d88a58" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 54, "insertions": 34, "lines": 88, "files": 1, "dmm_unit_size": 0.0, "dmm_unit_complexity": 0.0, "dmm_unit_interfacing": 1.0, "modified_files": [ { 
"old_path": "scipy_base/setup_scipy_base.py", "new_path": "scipy_base/setup_scipy_base.py", "filename": "setup_scipy_base.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -1,50 +1,6 @@\n #!/usr/bin/env python\n import os, sys\n from glob import glob\n-import shutil\n-\n-class _CleanUpFile:\n- \"\"\"CleanUpFile deletes the specified filename when self is destroyed.\"\"\"\n- def __init__(self, name):\n- self.name = name\n- def __del__(self):\n- os.remove(self.name)\n- # pass # leave source around for debugging\n-\n-def _temp_copy(_from, _to):\n- \"\"\"temp_copy copies a named file into a named temporary file.\n- The temporary will be deleted when the setupext module is destructed.\n- \"\"\"\n- # Copy the file data from _from to _to\n- s = open(_from).read()\n- open(_to,\"w+\").write(s)\n- # Suppress object rebuild by preserving time stamps.\n- stats = os.stat(_from)\n- os.utime(_to, (stats.st_atime, stats.st_mtime))\n- # Make an object to eliminate the temporary file at exit time.\n- globals()[\"_cleanup_\"+_to] = _CleanUpFile(_to)\n-\n-def _config_compiled_base(package, local_path, numerix_prefix, macro, info):\n- \"\"\"_config_compiled_base returns the Extension object for an\n- Numeric or numarray specific version of _compiled_base.\n- \"\"\"\n- from scipy_distutils.system_info import dict_append\n- from scipy_distutils.core import Extension\n- from scipy_distutils.misc_util import dot_join\n- module = numerix_prefix + \"_compiled_base\"\n- source = module + '.c'\n- _temp_copy(os.path.join(local_path, \"_compiled_base.c\"),\n- os.path.join(local_path, source))\n- sources = [source]\n- sources = [os.path.join(local_path,x) for x in sources]\n- depends = sources\n- ext_args = {'name':dot_join(package, module),\n- 'sources':sources,\n- 'depends':depends,\n- 'define_macros':[(macro,1)],\n- }\n- dict_append(ext_args,**info)\n- return Extension(**ext_args)\n \n def configuration(parent_package='',parent_path=None):\n from scipy_distutils.system_info import 
get_info, dict_append\n@@ -52,6 +8,8 @@ def configuration(parent_package='',parent_path=None):\n from scipy_distutils.misc_util import get_path,default_config_dict,dot_join\n from scipy_distutils.misc_util import get_path,default_config_dict,\\\n dot_join,SourceGenerator\n+ from distutils.dep_util import newer\n+ from distutils.file_util import copy_file\n \n package = 'scipy_base'\n local_path = get_path(__name__,parent_path)\n@@ -99,19 +57,41 @@ def configuration(parent_package='',parent_path=None):\n 'depends': umath_c_sources}\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n- \n- # _compiled_base module for Numeric: _nc_compiled_base\n- _nc_compiled_base_ext = _config_compiled_base(\n- package, local_path, \"_nc\", \"NUMERIC\", numpy_info)\n- config['ext_modules'].append(_nc_compiled_base_ext)\n \n- # _compiled_base module for numarray: _na_compiled_base\n+\n+ # _nc_compiled_base and _na_compiled_base modules\n+\n+ _compiled_base_c = os.path.join(local_path,'_compiled_base.c')\n+ def compiled_base_c(ext,src_dir):\n+ source = os.path.join(src_dir,ext.name.split('.')[-1] + '.c')\n+ if newer(_compiled_base_c,source):\n+ copy_file(_compiled_base_c,source)\n+ return [source]\n+\n+ ext_args = {}\n+ dict_append(ext_args,\n+ name=dot_join(package,'_nc_compiled_base'),\n+ sources = [compiled_base_c],\n+ depends = [_compiled_base_c],\n+ define_macros = [('NUMERIC',None)],\n+ include_dirs = [local_path]\n+ )\n+ dict_append(ext_args,**numpy_info)\n+ config['ext_modules'].append(Extension(**ext_args))\n+\n numarray_info = get_info('numarray')\n if numarray_info:\n- _na_compiled_base_ext = _config_compiled_base(\n- package, local_path, \"_na\", \"NUMARRAY\", numarray_info)\n- config['ext_modules'].append(_na_compiled_base_ext)\n- \n+ ext_args = {}\n+ dict_append(ext_args,\n+ name=dot_join(package,'_na_compiled_base'),\n+ sources = [compiled_base_c],\n+ depends = [_compiled_base_c],\n+ define_macros = [('NUMARRAY',None)],\n+ 
include_dirs = [local_path]\n+ )\n+ dict_append(ext_args,**numarray_info)\n+ config['ext_modules'].append(Extension(**ext_args))\n+\n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n x11 = get_info('x11')\n", "added_lines": 34, "deleted_lines": 54, "source_code": "#!/usr/bin/env python\nimport os, sys\nfrom glob import glob\n \ndef configuration(parent_package='',parent_path=None):\n from scipy_distutils.system_info import get_info, dict_append\n from scipy_distutils.core import Extension\n from scipy_distutils.misc_util import get_path,default_config_dict,dot_join\n from scipy_distutils.misc_util import get_path,default_config_dict,\\\n dot_join,SourceGenerator\n from distutils.dep_util import newer\n from distutils.file_util import copy_file\n\n package = 'scipy_base'\n local_path = get_path(__name__,parent_path)\n config = default_config_dict(package,parent_package)\n\n numpy_info = get_info('numpy',notfound_action=2)\n\n # extra_compile_args -- trying to find something that is binary compatible\n # with msvc for returning Py_complex from functions\n extra_compile_args=[]\n \n # fastumath module\n # scipy_base.fastumath module\n umath_c_sources = ['fastumathmodule.c',\n 'fastumath_unsigned.inc',\n 'fastumath_nounsigned.inc',\n '_scipy_mapping.c',\n '_scipy_number.c']\n depends = umath_c_sources # ????\n depends = [os.path.join(local_path,x) for x in depends]\n umath_c_sources = [os.path.join(local_path,x) for x in umath_c_sources]\n umath_c = SourceGenerator(func = None,\n target = os.path.join(local_path,'fastumathmodule.c'),\n sources = umath_c_sources)\n sources = [umath_c, os.path.join(local_path,'isnan.c')]\n define_macros = []\n undef_macros = []\n libraries = []\n if sys.byteorder == \"little\":\n define_macros.append(('USE_MCONF_LITE_LE',None))\n else:\n define_macros.append(('USE_MCONF_LITE_BE',None))\n if sys.platform in ['win32']:\n undef_macros.append('HAVE_INVERSE_HYPERBOLIC')\n else:\n libraries.append('m')\n 
define_macros.append(('HAVE_INVERSE_HYPERBOLIC',None))\n\n ext_args = {'name':dot_join(package,'fastumath'),\n 'sources':sources,\n 'define_macros': define_macros,\n 'undef_macros': undef_macros,\n 'libraries': libraries,\n 'extra_compile_args': extra_compile_args,\n 'depends': umath_c_sources}\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n\n # _nc_compiled_base and _na_compiled_base modules\n\n _compiled_base_c = os.path.join(local_path,'_compiled_base.c')\n def compiled_base_c(ext,src_dir):\n source = os.path.join(src_dir,ext.name.split('.')[-1] + '.c')\n if newer(_compiled_base_c,source):\n copy_file(_compiled_base_c,source)\n return [source]\n\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_nc_compiled_base'),\n sources = [compiled_base_c],\n depends = [_compiled_base_c],\n define_macros = [('NUMERIC',None)],\n include_dirs = [local_path]\n )\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n numarray_info = get_info('numarray')\n if numarray_info:\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_na_compiled_base'),\n sources = [compiled_base_c],\n depends = [_compiled_base_c],\n define_macros = [('NUMARRAY',None)],\n include_dirs = [local_path]\n )\n dict_append(ext_args,**numarray_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n x11 = get_info('x11')\n if x11:\n x11['define_macros'] = [('HAVE_X11',None)]\n ext = Extension(dot_join(package,'display_test'), sources, **x11)\n config['ext_modules'].append(ext)\n\n return config\n\nif __name__ == '__main__':\n from scipy_base_version import scipy_base_version\n print 'scipy_base Version',scipy_base_version\n from scipy_distutils.core import setup\n\n setup(version = scipy_base_version,\n maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy 
base module\",\n url = \"http://www.scipy.org\",\n license = \"SciPy License (BSD Style)\",\n **configuration(parent_path='')\n )\n", "source_code_before": "#!/usr/bin/env python\nimport os, sys\nfrom glob import glob\nimport shutil\n\nclass _CleanUpFile:\n \"\"\"CleanUpFile deletes the specified filename when self is destroyed.\"\"\"\n def __init__(self, name):\n self.name = name\n def __del__(self):\n os.remove(self.name)\n # pass # leave source around for debugging\n\ndef _temp_copy(_from, _to):\n \"\"\"temp_copy copies a named file into a named temporary file.\n The temporary will be deleted when the setupext module is destructed.\n \"\"\"\n # Copy the file data from _from to _to\n s = open(_from).read()\n open(_to,\"w+\").write(s)\n # Suppress object rebuild by preserving time stamps.\n stats = os.stat(_from)\n os.utime(_to, (stats.st_atime, stats.st_mtime))\n # Make an object to eliminate the temporary file at exit time.\n globals()[\"_cleanup_\"+_to] = _CleanUpFile(_to)\n\ndef _config_compiled_base(package, local_path, numerix_prefix, macro, info):\n \"\"\"_config_compiled_base returns the Extension object for an\n Numeric or numarray specific version of _compiled_base.\n \"\"\"\n from scipy_distutils.system_info import dict_append\n from scipy_distutils.core import Extension\n from scipy_distutils.misc_util import dot_join\n module = numerix_prefix + \"_compiled_base\"\n source = module + '.c'\n _temp_copy(os.path.join(local_path, \"_compiled_base.c\"),\n os.path.join(local_path, source))\n sources = [source]\n sources = [os.path.join(local_path,x) for x in sources]\n depends = sources\n ext_args = {'name':dot_join(package, module),\n 'sources':sources,\n 'depends':depends,\n 'define_macros':[(macro,1)],\n }\n dict_append(ext_args,**info)\n return Extension(**ext_args)\n \ndef configuration(parent_package='',parent_path=None):\n from scipy_distutils.system_info import get_info, dict_append\n from scipy_distutils.core import Extension\n from 
scipy_distutils.misc_util import get_path,default_config_dict,dot_join\n from scipy_distutils.misc_util import get_path,default_config_dict,\\\n dot_join,SourceGenerator\n\n package = 'scipy_base'\n local_path = get_path(__name__,parent_path)\n config = default_config_dict(package,parent_package)\n\n numpy_info = get_info('numpy',notfound_action=2)\n\n # extra_compile_args -- trying to find something that is binary compatible\n # with msvc for returning Py_complex from functions\n extra_compile_args=[]\n \n # fastumath module\n # scipy_base.fastumath module\n umath_c_sources = ['fastumathmodule.c',\n 'fastumath_unsigned.inc',\n 'fastumath_nounsigned.inc',\n '_scipy_mapping.c',\n '_scipy_number.c']\n depends = umath_c_sources # ????\n depends = [os.path.join(local_path,x) for x in depends]\n umath_c_sources = [os.path.join(local_path,x) for x in umath_c_sources]\n umath_c = SourceGenerator(func = None,\n target = os.path.join(local_path,'fastumathmodule.c'),\n sources = umath_c_sources)\n sources = [umath_c, os.path.join(local_path,'isnan.c')]\n define_macros = []\n undef_macros = []\n libraries = []\n if sys.byteorder == \"little\":\n define_macros.append(('USE_MCONF_LITE_LE',None))\n else:\n define_macros.append(('USE_MCONF_LITE_BE',None))\n if sys.platform in ['win32']:\n undef_macros.append('HAVE_INVERSE_HYPERBOLIC')\n else:\n libraries.append('m')\n define_macros.append(('HAVE_INVERSE_HYPERBOLIC',None))\n\n ext_args = {'name':dot_join(package,'fastumath'),\n 'sources':sources,\n 'define_macros': define_macros,\n 'undef_macros': undef_macros,\n 'libraries': libraries,\n 'extra_compile_args': extra_compile_args,\n 'depends': umath_c_sources}\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n \n # _compiled_base module for Numeric: _nc_compiled_base\n _nc_compiled_base_ext = _config_compiled_base(\n package, local_path, \"_nc\", \"NUMERIC\", numpy_info)\n config['ext_modules'].append(_nc_compiled_base_ext)\n\n # 
_compiled_base module for numarray: _na_compiled_base\n numarray_info = get_info('numarray')\n if numarray_info:\n _na_compiled_base_ext = _config_compiled_base(\n package, local_path, \"_na\", \"NUMARRAY\", numarray_info)\n config['ext_modules'].append(_na_compiled_base_ext)\n \n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n x11 = get_info('x11')\n if x11:\n x11['define_macros'] = [('HAVE_X11',None)]\n ext = Extension(dot_join(package,'display_test'), sources, **x11)\n config['ext_modules'].append(ext)\n\n return config\n\nif __name__ == '__main__':\n from scipy_base_version import scipy_base_version\n print 'scipy_base Version',scipy_base_version\n from scipy_distutils.core import setup\n\n setup(version = scipy_base_version,\n maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy base module\",\n url = \"http://www.scipy.org\",\n license = \"SciPy License (BSD Style)\",\n **configuration(parent_path='')\n )\n", "methods": [ { "name": "configuration.compiled_base_c", "long_name": "configuration.compiled_base_c( ext , src_dir )", "filename": "setup_scipy_base.py", "nloc": 5, "complexity": 2, "token_count": 50, "parameters": [ "ext", "src_dir" ], "start_line": 65, "end_line": 69, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 77, "complexity": 7, "token_count": 533, "parameters": [ "parent_package", "parent_path" ], "start_line": 5, "end_line": 103, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 99, "top_nesting_level": 0 } ], "methods_before": [ { "name": "__init__", "long_name": "__init__( self , name )", "filename": "setup_scipy_base.py", "nloc": 2, "complexity": 1, "token_count": 12, "parameters": [ "self", "name" ], "start_line": 8, "end_line": 9, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__del__", "long_name": "__del__( self )", "filename": "setup_scipy_base.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 10, "end_line": 11, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_temp_copy", "long_name": "_temp_copy( _from , _to )", "filename": "setup_scipy_base.py", "nloc": 6, "complexity": 1, "token_count": 66, "parameters": [ "_from", "_to" ], "start_line": 14, "end_line": 25, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 }, { "name": "_config_compiled_base", "long_name": "_config_compiled_base( package , local_path , numerix_prefix , macro , info )", "filename": "setup_scipy_base.py", "nloc": 18, "complexity": 2, "token_count": 136, "parameters": [ "package", "local_path", "numerix_prefix", "macro", "info" ], "start_line": 27, "end_line": 47, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 0 }, { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 59, "complexity": 7, "token_count": 423, "parameters": [ "parent_package", "parent_path" ], "start_line": 49, "end_line": 123, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 75, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "_temp_copy", "long_name": "_temp_copy( _from , _to )", "filename": "setup_scipy_base.py", "nloc": 6, "complexity": 1, "token_count": 66, "parameters": [ "_from", "_to" ], "start_line": 14, "end_line": 25, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 }, { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 77, "complexity": 7, "token_count": 533, "parameters": [ "parent_package", "parent_path" ], 
"start_line": 5, "end_line": 103, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 99, "top_nesting_level": 0 }, { "name": "configuration.compiled_base_c", "long_name": "configuration.compiled_base_c( ext , src_dir )", "filename": "setup_scipy_base.py", "nloc": 5, "complexity": 2, "token_count": 50, "parameters": [ "ext", "src_dir" ], "start_line": 65, "end_line": 69, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "_config_compiled_base", "long_name": "_config_compiled_base( package , local_path , numerix_prefix , macro , info )", "filename": "setup_scipy_base.py", "nloc": 18, "complexity": 2, "token_count": 136, "parameters": [ "package", "local_path", "numerix_prefix", "macro", "info" ], "start_line": 27, "end_line": 47, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 0 }, { "name": "__del__", "long_name": "__del__( self )", "filename": "setup_scipy_base.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 10, "end_line": 11, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self , name )", "filename": "setup_scipy_base.py", "nloc": 2, "complexity": 1, "token_count": 12, "parameters": [ "self", "name" ], "start_line": 8, "end_line": 9, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 } ], "nloc": 95, "complexity": 9, "token_count": 644, "diff_parsed": { "added": [ " from distutils.dep_util import newer", " from distutils.file_util import copy_file", "", " # _nc_compiled_base and _na_compiled_base modules", "", " _compiled_base_c = os.path.join(local_path,'_compiled_base.c')", " def compiled_base_c(ext,src_dir):", " source = os.path.join(src_dir,ext.name.split('.')[-1] + '.c')", " if newer(_compiled_base_c,source):", " copy_file(_compiled_base_c,source)", " return [source]", "", " ext_args = {}", " 
dict_append(ext_args,", " name=dot_join(package,'_nc_compiled_base'),", " sources = [compiled_base_c],", " depends = [_compiled_base_c],", " define_macros = [('NUMERIC',None)],", " include_dirs = [local_path]", " )", " dict_append(ext_args,**numpy_info)", " config['ext_modules'].append(Extension(**ext_args))", "", " ext_args = {}", " dict_append(ext_args,", " name=dot_join(package,'_na_compiled_base'),", " sources = [compiled_base_c],", " depends = [_compiled_base_c],", " define_macros = [('NUMARRAY',None)],", " include_dirs = [local_path]", " )", " dict_append(ext_args,**numarray_info)", " config['ext_modules'].append(Extension(**ext_args))", "" ], "deleted": [ "import shutil", "", "class _CleanUpFile:", " \"\"\"CleanUpFile deletes the specified filename when self is destroyed.\"\"\"", " def __init__(self, name):", " self.name = name", " def __del__(self):", " os.remove(self.name)", " # pass # leave source around for debugging", "", "def _temp_copy(_from, _to):", " \"\"\"temp_copy copies a named file into a named temporary file.", " The temporary will be deleted when the setupext module is destructed.", " \"\"\"", " # Copy the file data from _from to _to", " s = open(_from).read()", " open(_to,\"w+\").write(s)", " # Suppress object rebuild by preserving time stamps.", " stats = os.stat(_from)", " os.utime(_to, (stats.st_atime, stats.st_mtime))", " # Make an object to eliminate the temporary file at exit time.", " globals()[\"_cleanup_\"+_to] = _CleanUpFile(_to)", "", "def _config_compiled_base(package, local_path, numerix_prefix, macro, info):", " \"\"\"_config_compiled_base returns the Extension object for an", " Numeric or numarray specific version of _compiled_base.", " \"\"\"", " from scipy_distutils.system_info import dict_append", " from scipy_distutils.core import Extension", " from scipy_distutils.misc_util import dot_join", " module = numerix_prefix + \"_compiled_base\"", " source = module + '.c'", " _temp_copy(os.path.join(local_path, 
\"_compiled_base.c\"),", " os.path.join(local_path, source))", " sources = [source]", " sources = [os.path.join(local_path,x) for x in sources]", " depends = sources", " ext_args = {'name':dot_join(package, module),", " 'sources':sources,", " 'depends':depends,", " 'define_macros':[(macro,1)],", " }", " dict_append(ext_args,**info)", " return Extension(**ext_args)", "", " # _compiled_base module for Numeric: _nc_compiled_base", " _nc_compiled_base_ext = _config_compiled_base(", " package, local_path, \"_nc\", \"NUMERIC\", numpy_info)", " config['ext_modules'].append(_nc_compiled_base_ext)", " # _compiled_base module for numarray: _na_compiled_base", " _na_compiled_base_ext = _config_compiled_base(", " package, local_path, \"_na\", \"NUMARRAY\", numarray_info)", " config['ext_modules'].append(_na_compiled_base_ext)", "" ] } } ] }, { "hash": "52e79768740eec62156261bcfa7cf52caaf32fa9", "msg": "Removed obsolute file. Using MANIFEST.in should be avoided, all source files should be listed in setup_...py files.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-01-28T16:36:27+00:00", "author_timezone": 0, "committer_date": "2005-01-28T16:36:27+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "94d664637f86a02327efa902bc29b82dec58679c" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 1, "insertions": 0, "lines": 1, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "MANIFEST.in", "new_path": null, "filename": "MANIFEST.in", "extension": "in", "change_type": "DELETE", "diff": "@@ -1 +0,0 @@\n-include scipy_base/_compiled_base.c\n", "added_lines": 0, "deleted_lines": 1, "source_code": null, "source_code_before": "include scipy_base/_compiled_base.c\n", "methods": [], 
"methods_before": [], "changed_methods": [], "nloc": null, "complexity": null, "token_count": null, "diff_parsed": { "added": [], "deleted": [ "include scipy_base/_compiled_base.c" ] } } ] }, { "hash": "ba85fbb3f0b7de0413d18bbaf018c26e7623ee08", "msg": "Renamed Scipy to scipy (as there are packages like scipy_base etc).", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-01-28T16:49:19+00:00", "author_timezone": 0, "committer_date": "2005-01-28T16:49:19+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "52e79768740eec62156261bcfa7cf52caaf32fa9" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 1, "insertions": 1, "lines": 2, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "setup.py", "new_path": "setup.py", "filename": "setup.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -27,7 +27,7 @@ def setup_package():\n sys.path.insert(0, local_path)\n \n try:\n- configs = [{'name':'Scipy_core'}]\n+ configs = [{'name':'scipy_core'}]\n versions = []\n for n in bundle_packages:\n sys.path.insert(0,os.path.join(local_path,n))\n", "added_lines": 1, "deleted_lines": 1, "source_code": "#!/usr/bin/env python\n\"\"\"\nBundle of SciPy core modules:\n scipy_test\n scipy_distutils\n scipy_base\n weave\n\nUsage:\n python setup.py install\n python setup.py sdist -f\n\"\"\"\n\nimport os\nimport sys\n\nfrom scipy_distutils.core import setup\nfrom scipy_distutils.misc_util import default_config_dict\nfrom scipy_distutils.misc_util import get_path, merge_config_dicts\n\nbundle_packages = ['scipy_distutils','scipy_test','scipy_base','weave']\n\ndef setup_package():\n old_path = os.getcwd()\n local_path = os.path.dirname(os.path.abspath(sys.argv[0]))\n os.chdir(local_path)\n 
sys.path.insert(0, local_path)\n\n try:\n configs = [{'name':'scipy_core'}]\n versions = []\n for n in bundle_packages:\n sys.path.insert(0,os.path.join(local_path,n))\n try:\n mod = __import__('setup_'+n)\n configs.append(mod.configuration(parent_path=local_path))\n mod = __import__(n+'_version')\n versions.append(mod)\n finally:\n del sys.path[0]\n \n config_dict = merge_config_dicts(configs)\n\n major = max([v.major for v in versions])\n minor = max([v.minor for v in versions])\n micro = max([v.micro for v in versions])\n release_level = min([v.release_level for v in versions])\n release_level = ''\n cvs_minor = reduce(lambda a,b:a+b,[v.cvs_minor for v in versions],0)\n cvs_serial = reduce(lambda a,b:a+b,[v.cvs_serial for v in versions],0)\n\n if release_level:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n\n print 'SciPy Core Version %s' % scipy_core_version\n setup( version = scipy_core_version,\n maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy core modules: scipy_{distutils,test,base}\",\n license = \"SciPy License (BSD Style)\",\n url = \"http://www.scipy.org\",\n **config_dict\n )\n\n finally:\n del sys.path[0]\n os.chdir(old_path)\n\nif __name__ == \"__main__\":\n setup_package()\n", "source_code_before": "#!/usr/bin/env python\n\"\"\"\nBundle of SciPy core modules:\n scipy_test\n scipy_distutils\n scipy_base\n weave\n\nUsage:\n python setup.py install\n python setup.py sdist -f\n\"\"\"\n\nimport os\nimport sys\n\nfrom scipy_distutils.core import setup\nfrom scipy_distutils.misc_util import default_config_dict\nfrom scipy_distutils.misc_util import get_path, merge_config_dicts\n\nbundle_packages = ['scipy_distutils','scipy_test','scipy_base','weave']\n\ndef setup_package():\n old_path = os.getcwd()\n 
local_path = os.path.dirname(os.path.abspath(sys.argv[0]))\n os.chdir(local_path)\n sys.path.insert(0, local_path)\n\n try:\n configs = [{'name':'Scipy_core'}]\n versions = []\n for n in bundle_packages:\n sys.path.insert(0,os.path.join(local_path,n))\n try:\n mod = __import__('setup_'+n)\n configs.append(mod.configuration(parent_path=local_path))\n mod = __import__(n+'_version')\n versions.append(mod)\n finally:\n del sys.path[0]\n \n config_dict = merge_config_dicts(configs)\n\n major = max([v.major for v in versions])\n minor = max([v.minor for v in versions])\n micro = max([v.micro for v in versions])\n release_level = min([v.release_level for v in versions])\n release_level = ''\n cvs_minor = reduce(lambda a,b:a+b,[v.cvs_minor for v in versions],0)\n cvs_serial = reduce(lambda a,b:a+b,[v.cvs_serial for v in versions],0)\n\n if release_level:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n\n print 'SciPy Core Version %s' % scipy_core_version\n setup( version = scipy_core_version,\n maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy core modules: scipy_{distutils,test,base}\",\n license = \"SciPy License (BSD Style)\",\n url = \"http://www.scipy.org\",\n **config_dict\n )\n\n finally:\n del sys.path[0]\n os.chdir(old_path)\n\nif __name__ == \"__main__\":\n setup_package()\n", "methods": [ { "name": "setup_package", "long_name": "setup_package( )", "filename": "setup.py", "nloc": 44, "complexity": 11, "token_count": 326, "parameters": [], "start_line": 23, "end_line": 72, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 50, "top_nesting_level": 0 } ], "methods_before": [ { "name": "setup_package", "long_name": "setup_package( )", "filename": "setup.py", "nloc": 44, "complexity": 11, "token_count": 326, 
"parameters": [], "start_line": 23, "end_line": 72, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 50, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "setup_package", "long_name": "setup_package( )", "filename": "setup.py", "nloc": 44, "complexity": 11, "token_count": 326, "parameters": [], "start_line": 23, "end_line": 72, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 50, "top_nesting_level": 0 } ], "nloc": 63, "complexity": 11, "token_count": 371, "diff_parsed": { "added": [ " configs = [{'name':'scipy_core'}]" ], "deleted": [ " configs = [{'name':'Scipy_core'}]" ] } } ] }, { "hash": "2974e97c9f4c6b4f5280950ea9d77b81711ecf2c", "msg": "Fixed version numbers.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-01-29T18:24:01+00:00", "author_timezone": 0, "committer_date": "2005-01-29T18:24:01+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "ba85fbb3f0b7de0413d18bbaf018c26e7623ee08" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 38, "insertions": 74, "lines": 112, "files": 7, "dmm_unit_size": 0.0, "dmm_unit_complexity": 0.0, "dmm_unit_interfacing": 1.0, "modified_files": [ { "old_path": "scipy_base/scipy_base_version.py", "new_path": "scipy_base/scipy_base_version.py", "filename": "scipy_base_version.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -8,13 +8,20 @@\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\n except ImportError,msg:\n- print msg\n cvs_minor = 0\n cvs_serial = 0\n \n-if release_level:\n- scipy_base_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+if cvs_minor or cvs_serial:\n+ if release_level:\n+ scipy_base_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ else:\n+ 
scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n- scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\\n- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ if release_level:\n+ scipy_base_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+ % (locals ())\n+ else:\n+ scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ % (locals ())\n", "added_lines": 12, "deleted_lines": 5, "source_code": "major = 0\nminor = 3\nmicro = 3\n#release_level = 'alpha'\nrelease_level = ''\ntry:\n from __cvs_version__ import cvs_version\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\nexcept ImportError,msg:\n cvs_minor = 0\n cvs_serial = 0\n\nif cvs_minor or cvs_serial:\n if release_level:\n scipy_base_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\nelse:\n if release_level:\n scipy_base_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n % (locals ())\n else:\n scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\\n % (locals ())\n", "source_code_before": "major = 0\nminor = 3\nmicro = 3\n#release_level = 'alpha'\nrelease_level = ''\ntry:\n from __cvs_version__ import cvs_version\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\nexcept ImportError,msg:\n print msg\n cvs_minor = 0\n cvs_serial = 0\n\nif release_level:\n scipy_base_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\nelse:\n scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n", "methods": [], "methods_before": [], "changed_methods": [], "nloc": 25, "complexity": 0, "token_count": 102, "diff_parsed": { "added": [ "if cvs_minor or cvs_serial:", " if release_level:", " scipy_base_version = 
'%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " else:", " scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\", " if release_level:", " scipy_base_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " % (locals ())", " else:", " scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\", " % (locals ())" ], "deleted": [ " print msg", "if release_level:", " scipy_base_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " scipy_base_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())" ] } }, { "old_path": "scipy_distutils/scipy_distutils_version.py", "new_path": "scipy_distutils/scipy_distutils_version.py", "filename": "scipy_distutils_version.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -8,13 +8,20 @@\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\n except ImportError,msg:\n- print msg\n cvs_minor = 0\n cvs_serial = 0\n \n-if release_level:\n- scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+if cvs_minor or cvs_serial:\n+ if release_level:\n+ scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ else:\n+ scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n- scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\\n- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ if release_level:\n+ scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+ % (locals ())\n+ else:\n+ scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ % (locals ())\n", "added_lines": 12, "deleted_lines": 5, "source_code": "major = 0\nminor = 3\nmicro = 3\n#release_level = 'alpha'\nrelease_level = ''\ntry:\n from __cvs_version__ import cvs_version\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\nexcept ImportError,msg:\n cvs_minor 
= 0\n cvs_serial = 0\n\nif cvs_minor or cvs_serial:\n if release_level:\n scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\nelse:\n if release_level:\n scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n % (locals ())\n else:\n scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\\n % (locals ())\n", "source_code_before": "major = 0\nminor = 3\nmicro = 3\n#release_level = 'alpha'\nrelease_level = ''\ntry:\n from __cvs_version__ import cvs_version\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\nexcept ImportError,msg:\n print msg\n cvs_minor = 0\n cvs_serial = 0\n\nif release_level:\n scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\nelse:\n scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n", "methods": [], "methods_before": [], "changed_methods": [], "nloc": 25, "complexity": 0, "token_count": 102, "diff_parsed": { "added": [ "if cvs_minor or cvs_serial:", " if release_level:", " scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " else:", " scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\", " if release_level:", " scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " % (locals ())", " else:", " scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\", " % (locals ())" ], "deleted": [ " print msg", "if release_level:", " scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " scipy_distutils_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())" ] } }, { "old_path": "scipy_test/scipy_test_version.py", 
"new_path": "scipy_test/scipy_test_version.py", "filename": "scipy_test_version.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -8,13 +8,20 @@\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\n except ImportError,msg:\n- print msg\n cvs_minor = 0\n cvs_serial = 0\n \n-if release_level:\n- scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+if cvs_minor or cvs_serial:\n+ if release_level:\n+ scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ else:\n+ scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n- scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\\n- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ if release_level:\n+ scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+ % (locals ())\n+ else:\n+ scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ % (locals ())\n", "added_lines": 13, "deleted_lines": 6, "source_code": "major = 0\nminor = 3\nmicro = 3\n#release_level = 'alpha'\nrelease_level = ''\ntry:\n from __cvs_version__ import cvs_version\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\nexcept ImportError,msg:\n cvs_minor = 0\n cvs_serial = 0\n\nif cvs_minor or cvs_serial:\n if release_level:\n scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\nelse:\n if release_level:\n scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n % (locals ())\n else:\n scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\\n % (locals ())\n", "source_code_before": "major = 0\nminor = 3\nmicro = 3\n#release_level = 'alpha'\nrelease_level = ''\ntry:\n from __cvs_version__ import cvs_version\n cvs_minor = 
cvs_version[-3]\n cvs_serial = cvs_version[-1]\nexcept ImportError,msg:\n print msg\n cvs_minor = 0\n cvs_serial = 0\n\nif release_level:\n scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\nelse:\n scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n", "methods": [], "methods_before": [], "changed_methods": [], "nloc": 25, "complexity": 0, "token_count": 102, "diff_parsed": { "added": [ "if cvs_minor or cvs_serial:", " if release_level:", " scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " else:", " scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " if release_level:", " scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " % (locals ())", " else:", " scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\", " % (locals ())" ], "deleted": [ " print msg", "if release_level:", " scipy_test_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " scipy_test_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())" ] } }, { "old_path": "setup.py", "new_path": "setup.py", "filename": "setup.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -1,6 +1,6 @@\n #!/usr/bin/env python\n \"\"\"\n-Bundle of SciPy core modules:\n+Bundle of Scipy core modules:\n scipy_test\n scipy_distutils\n scipy_base\n@@ -49,20 +49,29 @@ def setup_package():\n cvs_minor = reduce(lambda a,b:a+b,[v.cvs_minor for v in versions],0)\n cvs_serial = reduce(lambda a,b:a+b,[v.cvs_serial for v in versions],0)\n \n- if release_level:\n- scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n- '_%(release_level)s'\\\n- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ if cvs_minor or cvs_serial:\n+ if release_level:\n+ 
scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ '_%(release_level)s'\\\n+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ else:\n+ scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n- scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ if release_level:\n+ scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ '_%(release_level)s'\\\n+ % (locals ())\n+ else:\n+ scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ % (locals ())\n \n- print 'SciPy Core Version %s' % scipy_core_version\n+ print 'Scipy Core Version %s' % scipy_core_version\n setup( version = scipy_core_version,\n- maintainer = \"SciPy Developers\",\n+ maintainer = \"Scipy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n- description = \"SciPy core modules: scipy_{distutils,test,base}\",\n- license = \"SciPy License (BSD Style)\",\n+ description = \"Scipy core modules: scipy_{distutils,test,base}, weave\",\n+ license = \"Scipy License (BSD Style)\",\n url = \"http://www.scipy.org\",\n **config_dict\n )\n", "added_lines": 20, "deleted_lines": 11, "source_code": "#!/usr/bin/env python\n\"\"\"\nBundle of Scipy core modules:\n scipy_test\n scipy_distutils\n scipy_base\n weave\n\nUsage:\n python setup.py install\n python setup.py sdist -f\n\"\"\"\n\nimport os\nimport sys\n\nfrom scipy_distutils.core import setup\nfrom scipy_distutils.misc_util import default_config_dict\nfrom scipy_distutils.misc_util import get_path, merge_config_dicts\n\nbundle_packages = ['scipy_distutils','scipy_test','scipy_base','weave']\n\ndef setup_package():\n old_path = os.getcwd()\n local_path = os.path.dirname(os.path.abspath(sys.argv[0]))\n os.chdir(local_path)\n sys.path.insert(0, local_path)\n\n try:\n configs = [{'name':'scipy_core'}]\n versions = []\n for n in bundle_packages:\n sys.path.insert(0,os.path.join(local_path,n))\n try:\n mod = __import__('setup_'+n)\n 
configs.append(mod.configuration(parent_path=local_path))\n mod = __import__(n+'_version')\n versions.append(mod)\n finally:\n del sys.path[0]\n \n config_dict = merge_config_dicts(configs)\n\n major = max([v.major for v in versions])\n minor = max([v.minor for v in versions])\n micro = max([v.micro for v in versions])\n release_level = min([v.release_level for v in versions])\n release_level = ''\n cvs_minor = reduce(lambda a,b:a+b,[v.cvs_minor for v in versions],0)\n cvs_serial = reduce(lambda a,b:a+b,[v.cvs_serial for v in versions],0)\n\n if cvs_minor or cvs_serial:\n if release_level:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n if release_level:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(release_level)s'\\\n % (locals ())\n else:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n % (locals ())\n\n print 'Scipy Core Version %s' % scipy_core_version\n setup( version = scipy_core_version,\n maintainer = \"Scipy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"Scipy core modules: scipy_{distutils,test,base}, weave\",\n license = \"Scipy License (BSD Style)\",\n url = \"http://www.scipy.org\",\n **config_dict\n )\n\n finally:\n del sys.path[0]\n os.chdir(old_path)\n\nif __name__ == \"__main__\":\n setup_package()\n", "source_code_before": "#!/usr/bin/env python\n\"\"\"\nBundle of SciPy core modules:\n scipy_test\n scipy_distutils\n scipy_base\n weave\n\nUsage:\n python setup.py install\n python setup.py sdist -f\n\"\"\"\n\nimport os\nimport sys\n\nfrom scipy_distutils.core import setup\nfrom scipy_distutils.misc_util import default_config_dict\nfrom scipy_distutils.misc_util import get_path, merge_config_dicts\n\nbundle_packages = ['scipy_distutils','scipy_test','scipy_base','weave']\n\ndef 
setup_package():\n old_path = os.getcwd()\n local_path = os.path.dirname(os.path.abspath(sys.argv[0]))\n os.chdir(local_path)\n sys.path.insert(0, local_path)\n\n try:\n configs = [{'name':'scipy_core'}]\n versions = []\n for n in bundle_packages:\n sys.path.insert(0,os.path.join(local_path,n))\n try:\n mod = __import__('setup_'+n)\n configs.append(mod.configuration(parent_path=local_path))\n mod = __import__(n+'_version')\n versions.append(mod)\n finally:\n del sys.path[0]\n \n config_dict = merge_config_dicts(configs)\n\n major = max([v.major for v in versions])\n minor = max([v.minor for v in versions])\n micro = max([v.micro for v in versions])\n release_level = min([v.release_level for v in versions])\n release_level = ''\n cvs_minor = reduce(lambda a,b:a+b,[v.cvs_minor for v in versions],0)\n cvs_serial = reduce(lambda a,b:a+b,[v.cvs_serial for v in versions],0)\n\n if release_level:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n\n print 'SciPy Core Version %s' % scipy_core_version\n setup( version = scipy_core_version,\n maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy core modules: scipy_{distutils,test,base}\",\n license = \"SciPy License (BSD Style)\",\n url = \"http://www.scipy.org\",\n **config_dict\n )\n\n finally:\n del sys.path[0]\n os.chdir(old_path)\n\nif __name__ == \"__main__\":\n setup_package()\n", "methods": [ { "name": "setup_package", "long_name": "setup_package( )", "filename": "setup.py", "nloc": 53, "complexity": 14, "token_count": 360, "parameters": [], "start_line": 23, "end_line": 81, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 59, "top_nesting_level": 0 } ], "methods_before": [ { "name": "setup_package", "long_name": "setup_package( )", "filename": "setup.py", "nloc": 
44, "complexity": 11, "token_count": 326, "parameters": [], "start_line": 23, "end_line": 72, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 50, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "setup_package", "long_name": "setup_package( )", "filename": "setup.py", "nloc": 53, "complexity": 14, "token_count": 360, "parameters": [], "start_line": 23, "end_line": 81, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 59, "top_nesting_level": 0 } ], "nloc": 72, "complexity": 14, "token_count": 405, "diff_parsed": { "added": [ "Bundle of Scipy core modules:", " if cvs_minor or cvs_serial:", " if release_level:", " scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(release_level)s'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " else:", " scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " if release_level:", " scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(release_level)s'\\", " % (locals ())", " else:", " scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\", " % (locals ())", " print 'Scipy Core Version %s' % scipy_core_version", " maintainer = \"Scipy Developers\",", " description = \"Scipy core modules: scipy_{distutils,test,base}, weave\",", " license = \"Scipy License (BSD Style)\"," ], "deleted": [ "Bundle of SciPy core modules:", " if release_level:", " scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(release_level)s'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " scipy_core_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " print 'SciPy Core Version %s' % scipy_core_version", " maintainer = \"SciPy Developers\",", " description = \"SciPy core modules: scipy_{distutils,test,base}\",", " license = \"SciPy License (BSD Style)\"," ] } }, { "old_path": "weave/setup.py", "new_path": "weave/setup.py", "filename": "setup.py", "extension": "py", "change_type": "MODIFY", 
"diff": "@@ -4,9 +4,7 @@\n from scipy_distutils.misc_util import get_path, merge_config_dicts\n from scipy_distutils.misc_util import package_config\n \n-# Enough changes to bump the number. We need a global method for\n-# versioning\n-version = \"0.3.0\"\n+from weave_version import weave_version\n \n def stand_alone_package(with_dependencies = 0):\n path = get_path(__name__)\n@@ -22,7 +20,7 @@ def stand_alone_package(with_dependencies = 0):\n print 'dep:', dependencies\n config_dict = package_config(primary,dependencies)\n config_dict['name'] = 'weave'\n- setup (version = version,\n+ setup (version = weave_version,\n description = \"Tools for inlining C/C++ in Python\",\n author = \"Eric Jones\",\n author_email = \"eric@enthought.com\",\n", "added_lines": 2, "deleted_lines": 4, "source_code": "#!/usr/bin/env python\nimport os,sys\nfrom scipy_distutils.core import setup\nfrom scipy_distutils.misc_util import get_path, merge_config_dicts\nfrom scipy_distutils.misc_util import package_config\n\nfrom weave_version import weave_version\n \ndef stand_alone_package(with_dependencies = 0):\n path = get_path(__name__)\n old_path = os.getcwd()\n os.chdir(path)\n try:\n primary = ['weave']\n if with_dependencies:\n dependencies= ['scipy_distutils','scipy_test','scipy_base'] \n else:\n dependencies = [] \n \n print 'dep:', dependencies\n config_dict = package_config(primary,dependencies)\n config_dict['name'] = 'weave'\n setup (version = weave_version,\n description = \"Tools for inlining C/C++ in Python\",\n author = \"Eric Jones\",\n author_email = \"eric@enthought.com\",\n licence = \"SciPy License (BSD Style)\",\n url = 'http://www.scipy.org',\n **config_dict\n ) \n finally:\n os.chdir(old_path)\n\nif __name__ == '__main__':\n import sys\n if '--without-dependencies' in sys.argv:\n with_dependencies = 0\n sys.argv.remove('--without-dependencies')\n else:\n with_dependencies = 1 \n stand_alone_package(with_dependencies)\n \n", "source_code_before": "#!/usr/bin/env 
python\nimport os,sys\nfrom scipy_distutils.core import setup\nfrom scipy_distutils.misc_util import get_path, merge_config_dicts\nfrom scipy_distutils.misc_util import package_config\n\n# Enough changes to bump the number. We need a global method for\n# versioning\nversion = \"0.3.0\"\n \ndef stand_alone_package(with_dependencies = 0):\n path = get_path(__name__)\n old_path = os.getcwd()\n os.chdir(path)\n try:\n primary = ['weave']\n if with_dependencies:\n dependencies= ['scipy_distutils','scipy_test','scipy_base'] \n else:\n dependencies = [] \n \n print 'dep:', dependencies\n config_dict = package_config(primary,dependencies)\n config_dict['name'] = 'weave'\n setup (version = version,\n description = \"Tools for inlining C/C++ in Python\",\n author = \"Eric Jones\",\n author_email = \"eric@enthought.com\",\n licence = \"SciPy License (BSD Style)\",\n url = 'http://www.scipy.org',\n **config_dict\n ) \n finally:\n os.chdir(old_path)\n\nif __name__ == '__main__':\n import sys\n if '--without-dependencies' in sys.argv:\n with_dependencies = 0\n sys.argv.remove('--without-dependencies')\n else:\n with_dependencies = 1 \n stand_alone_package(with_dependencies)\n \n", "methods": [ { "name": "stand_alone_package", "long_name": "stand_alone_package( with_dependencies = 0 )", "filename": "setup.py", "nloc": 23, "complexity": 3, "token_count": 106, "parameters": [ "with_dependencies" ], "start_line": 9, "end_line": 32, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 24, "top_nesting_level": 0 } ], "methods_before": [ { "name": "stand_alone_package", "long_name": "stand_alone_package( with_dependencies = 0 )", "filename": "setup.py", "nloc": 23, "complexity": 3, "token_count": 106, "parameters": [ "with_dependencies" ], "start_line": 11, "end_line": 34, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 24, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "stand_alone_package", "long_name": "stand_alone_package( with_dependencies = 0 )", 
"filename": "setup.py", "nloc": 23, "complexity": 3, "token_count": 106, "parameters": [ "with_dependencies" ], "start_line": 9, "end_line": 32, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 24, "top_nesting_level": 0 } ], "nloc": 36, "complexity": 3, "token_count": 169, "diff_parsed": { "added": [ "from weave_version import weave_version", " setup (version = weave_version," ], "deleted": [ "# Enough changes to bump the number. We need a global method for", "# versioning", "version = \"0.3.0\"", " setup (version = version," ] } }, { "old_path": "weave/setup_weave.py", "new_path": "weave/setup_weave.py", "filename": "setup_weave.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -50,7 +50,8 @@ def configuration(parent_package='',parent_path=None):\n \n if __name__ == '__main__': \n from scipy_distutils.core import setup\n- setup(version = \"0.3.0\",\n+ from weave_version import weave_version\n+ setup(version = weave_version,\n description = \"Tools for inlining C/C++ in Python\",\n author = \"Eric Jones\",\n author_email = \"eric@enthought.com\",\n", "added_lines": 2, "deleted_lines": 1, "source_code": "#!/usr/bin/env python\n\nimport os\nfrom glob import glob\nfrom scipy_distutils.misc_util import get_path, default_config_dict, dot_join\n\ndef configuration(parent_package='',parent_path=None):\n parent_path2 = parent_path\n parent_path = parent_package\n local_path = get_path(__name__,parent_path2)\n config = default_config_dict('weave',parent_package)\n config['packages'].append(dot_join(parent_package,'weave.tests'))\n test_path = os.path.join(local_path,'tests')\n config['package_dir']['weave.tests'] = test_path\n \n scxx_files = glob(os.path.join(local_path,'scxx','*.*'))\n install_path = os.path.join(parent_path,'weave','scxx')\n config['data_files'].extend( [(install_path,scxx_files)])\n \n blitz_files = glob(os.path.join(local_path,'blitz-20001213','blitz','*.*'))\n install_path = os.path.join(parent_path,'weave','blitz-20001213',\n 
'blitz')\n config['data_files'].extend( [(install_path,blitz_files)])\n \n array_files = glob(os.path.join(local_path,'blitz-20001213','blitz',\n 'array','*.*'))\n install_path = os.path.join(parent_path,'weave','blitz-20001213',\n 'blitz','array')\n config['data_files'].extend( [(install_path,array_files)])\n \n meta_files = glob(os.path.join(local_path,'blitz-20001213','blitz',\n 'meta','*.*'))\n install_path = os.path.join(parent_path,'weave','blitz-20001213',\n 'blitz','meta')\n config['data_files'].extend( [(install_path,meta_files)])\n\n swig_files = glob(os.path.join(local_path,'swig','*.c'))\n install_path = os.path.join(parent_path,'weave','swig')\n config['data_files'].extend( [(install_path,swig_files)])\n\n doc_files = glob(os.path.join(local_path,'doc','*.html'))\n install_path = os.path.join(parent_path,'weave','doc')\n config['data_files'].extend( [(install_path,doc_files)])\n\n example_files = glob(os.path.join(local_path,'examples','*.py'))\n install_path = os.path.join(parent_path,'weave','examples')\n config['data_files'].extend( [(install_path,example_files)])\n \n return config\n\nif __name__ == '__main__': \n from scipy_distutils.core import setup\n from weave_version import weave_version\n setup(version = weave_version,\n description = \"Tools for inlining C/C++ in Python\",\n author = \"Eric Jones\",\n author_email = \"eric@enthought.com\",\n licence = \"SciPy License (BSD Style)\",\n url = 'http://www.scipy.org',\n **configuration(parent_path=''))\n", "source_code_before": "#!/usr/bin/env python\n\nimport os\nfrom glob import glob\nfrom scipy_distutils.misc_util import get_path, default_config_dict, dot_join\n\ndef configuration(parent_package='',parent_path=None):\n parent_path2 = parent_path\n parent_path = parent_package\n local_path = get_path(__name__,parent_path2)\n config = default_config_dict('weave',parent_package)\n config['packages'].append(dot_join(parent_package,'weave.tests'))\n test_path = os.path.join(local_path,'tests')\n 
config['package_dir']['weave.tests'] = test_path\n \n scxx_files = glob(os.path.join(local_path,'scxx','*.*'))\n install_path = os.path.join(parent_path,'weave','scxx')\n config['data_files'].extend( [(install_path,scxx_files)])\n \n blitz_files = glob(os.path.join(local_path,'blitz-20001213','blitz','*.*'))\n install_path = os.path.join(parent_path,'weave','blitz-20001213',\n 'blitz')\n config['data_files'].extend( [(install_path,blitz_files)])\n \n array_files = glob(os.path.join(local_path,'blitz-20001213','blitz',\n 'array','*.*'))\n install_path = os.path.join(parent_path,'weave','blitz-20001213',\n 'blitz','array')\n config['data_files'].extend( [(install_path,array_files)])\n \n meta_files = glob(os.path.join(local_path,'blitz-20001213','blitz',\n 'meta','*.*'))\n install_path = os.path.join(parent_path,'weave','blitz-20001213',\n 'blitz','meta')\n config['data_files'].extend( [(install_path,meta_files)])\n\n swig_files = glob(os.path.join(local_path,'swig','*.c'))\n install_path = os.path.join(parent_path,'weave','swig')\n config['data_files'].extend( [(install_path,swig_files)])\n\n doc_files = glob(os.path.join(local_path,'doc','*.html'))\n install_path = os.path.join(parent_path,'weave','doc')\n config['data_files'].extend( [(install_path,doc_files)])\n\n example_files = glob(os.path.join(local_path,'examples','*.py'))\n install_path = os.path.join(parent_path,'weave','examples')\n config['data_files'].extend( [(install_path,example_files)])\n \n return config\n\nif __name__ == '__main__': \n from scipy_distutils.core import setup\n setup(version = \"0.3.0\",\n description = \"Tools for inlining C/C++ in Python\",\n author = \"Eric Jones\",\n author_email = \"eric@enthought.com\",\n licence = \"SciPy License (BSD Style)\",\n url = 'http://www.scipy.org',\n **configuration(parent_path=''))\n", "methods": [ { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_weave.py", "nloc": 35, 
"complexity": 1, "token_count": 412, "parameters": [ "parent_package", "parent_path" ], "start_line": 7, "end_line": 49, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 43, "top_nesting_level": 0 } ], "methods_before": [ { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_weave.py", "nloc": 35, "complexity": 1, "token_count": 412, "parameters": [ "parent_package", "parent_path" ], "start_line": 7, "end_line": 49, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 43, "top_nesting_level": 0 } ], "changed_methods": [], "nloc": 48, "complexity": 1, "token_count": 478, "diff_parsed": { "added": [ " from weave_version import weave_version", " setup(version = weave_version," ], "deleted": [ " setup(version = \"0.3.0\"," ] } }, { "old_path": "weave/weave_version.py", "new_path": "weave/weave_version.py", "filename": "weave_version.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -8,13 +8,20 @@\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\n except ImportError,msg:\n- print msg\n cvs_minor = 0\n cvs_serial = 0\n \n-if release_level:\n- weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+if cvs_minor or cvs_serial:\n+ if release_level:\n+ weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ else:\n+ weave_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n- weave_version = '%(major)d.%(minor)d.%(micro)d'\\\n- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n+ if release_level:\n+ weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n+ % (locals ())\n+ else:\n+ weave_version = '%(major)d.%(minor)d.%(micro)d'\\\n+ % (locals ())\n", "added_lines": 13, "deleted_lines": 6, "source_code": "major = 0\nminor = 3\nmicro = 3\n#release_level = 'alpha'\nrelease_level = ''\ntry:\n 
from __cvs_version__ import cvs_version\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\nexcept ImportError,msg:\n cvs_minor = 0\n cvs_serial = 0\n\nif cvs_minor or cvs_serial:\n if release_level:\n weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n else:\n weave_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\nelse:\n if release_level:\n weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n % (locals ())\n else:\n weave_version = '%(major)d.%(minor)d.%(micro)d'\\\n % (locals ())\n", "source_code_before": "major = 0\nminor = 3\nmicro = 3\n#release_level = 'alpha'\nrelease_level = ''\ntry:\n from __cvs_version__ import cvs_version\n cvs_minor = cvs_version[-3]\n cvs_serial = cvs_version[-1]\nexcept ImportError,msg:\n print msg\n cvs_minor = 0\n cvs_serial = 0\n\nif release_level:\n weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\nelse:\n weave_version = '%(major)d.%(minor)d.%(micro)d'\\\n '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())\n", "methods": [], "methods_before": [], "changed_methods": [], "nloc": 25, "complexity": 0, "token_count": 102, "diff_parsed": { "added": [ "if cvs_minor or cvs_serial:", " if release_level:", " weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " else:", " weave_version = '%(major)d.%(minor)d.%(micro)d'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " if release_level:", " weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " % (locals ())", " else:", " weave_version = '%(major)d.%(minor)d.%(micro)d'\\", " % (locals ())" ], "deleted": [ " print msg", "if release_level:", " weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())", " weave_version = 
'%(major)d.%(minor)d.%(micro)d'\\", " '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())" ] } } ] }, { "hash": "661570f96cdb7b12448d64b965edefed98f40436", "msg": "Added not about -g77 and -DUNDERSCORE_G77.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-02-03T20:30:13+00:00", "author_timezone": 0, "committer_date": "2005-02-03T20:30:13+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "2974e97c9f4c6b4f5280950ea9d77b81711ecf2c" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 0, "insertions": 4, "lines": 4, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "scipy_distutils/absoftfcompiler.py", "new_path": "scipy_distutils/absoftfcompiler.py", "filename": "absoftfcompiler.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -2,6 +2,10 @@\n # http://www.absoft.com/literature/osxuserguide.pdf\n # http://www.absoft.com/documentation.html\n \n+# Notes:\n+# - when using -g77 then use -DUNDERSCORE_G77 to compile f2py\n+# generated extension modules (works for f2py v2.45.241_1936 and up)\n+\n import os\n import sys\n \n", "added_lines": 4, "deleted_lines": 0, "source_code": "\n# http://www.absoft.com/literature/osxuserguide.pdf\n# http://www.absoft.com/documentation.html\n\n# Notes:\n# - when using -g77 then use -DUNDERSCORE_G77 to compile f2py\n# generated extension modules (works for f2py v2.45.241_1936 and up)\n\nimport os\nimport sys\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler, dummy_fortran_file\nfrom misc_util import cyg2win32\n\nclass AbsoftFCompiler(FCompiler):\n\n compiler_type = 'absoft'\n #version_pattern = r'FORTRAN 77 Compiler (?P[^\\s*,]*).*?Absoft Corp'\n version_pattern = r'(f90:.*?Absoft Pro FORTRAN Version|FORTRAN 77 
Compiler)'+\\\n r' (?P[^\\s*,]*)(.*?Absoft Corp|)'\n\n # samt5735(8)$ f90 -V -c dummy.f\n # f90: Copyright Absoft Corporation 1994-2002; Absoft Pro FORTRAN Version 8.0\n # Note that fink installs g77 as f77, so need to use f90 for detection.\n\n executables = {\n 'version_cmd' : [\"f90\", \"-V -c %(fname)s.f -o %(fname)s.o\" \\\n % {'fname':cyg2win32(dummy_fortran_file())}],\n 'compiler_f77' : [\"f77\"],\n 'compiler_fix' : [\"f90\"],\n 'compiler_f90' : [\"f90\"],\n 'linker_so' : [\"f90\"],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"]\n }\n\n if os.name=='nt':\n library_switch = '/out:' #No space after /out:!\n\n module_dir_switch = None\n module_include_switch = '-p'\n\n def get_flags_linker_so(self):\n if os.name=='nt':\n opt = ['/dll']\n else:\n opt = [\"-K\",\"shared\"]\n return opt\n\n def library_dir_option(self, dir):\n if os.name=='nt':\n return ['-link','/PATH:\"%s\"' % (dir)]\n return \"-L\" + dir\n\n def library_option(self, lib):\n if os.name=='nt':\n return '%s.lib' % (lib)\n return \"-l\" + lib\n\n def get_library_dirs(self):\n opt = FCompiler.get_library_dirs(self)\n d = os.environ.get('ABSOFT')\n if d:\n opt.append(os.path.join(d,'LIB'))\n return opt\n\n def get_libraries(self):\n opt = FCompiler.get_libraries(self)\n opt.extend(['fio','f90math','fmath'])\n if os.name =='nt':\n opt.append('COMDLG32')\n return opt\n\n def get_flags(self):\n opt = FCompiler.get_flags(self)\n if os.name != 'nt':\n opt.extend(['-s'])\n if self.get_version():\n if self.get_version()>='8.2':\n opt.append('-fpic')\n return opt\n\n def get_flags_f77(self):\n opt = FCompiler.get_flags_f77(self)\n opt.extend(['-N22','-N90','-N110'])\n v = self.get_version()\n if os.name == 'nt':\n if v and v>='8.0':\n opt.extend(['-f','-N15'])\n else:\n opt.append('-f')\n if v:\n if v<='4.6':\n opt.append('-B108')\n else:\n # Though -N15 is undocumented, it works with\n # Absoft 8.0 on Linux\n opt.append('-N15')\n return opt\n\n def get_flags_f90(self):\n opt = 
FCompiler.get_flags_f90(self)\n opt.extend([\"-YCFRL=1\",\"-YCOM_NAMES=LCS\",\"-YCOM_PFX\",\"-YEXT_PFX\",\n \"-YCOM_SFX=_\",\"-YEXT_SFX=_\",\"-YEXT_NAMES=LCS\"])\n if self.get_version():\n if self.get_version()>'4.6':\n opt.extend([\"-YDEALLOC=ALL\"]) \n return opt\n\n def get_flags_fix(self):\n opt = FCompiler.get_flags_fix(self)\n opt.extend([\"-YCFRL=1\",\"-YCOM_NAMES=LCS\",\"-YCOM_PFX\",\"-YEXT_PFX\",\n \"-YCOM_SFX=_\",\"-YEXT_SFX=_\",\"-YEXT_NAMES=LCS\"])\n opt.extend([\"-f\",\"fixed\"])\n return opt\n\n def get_flags_opt(self):\n opt = ['-O']\n return opt\n\nif __name__ == '__main__':\n from distutils import log\n log.set_verbosity(2)\n from fcompiler import new_fcompiler\n compiler = new_fcompiler(compiler='absoft')\n compiler.customize()\n print compiler.get_version()\n", "source_code_before": "\n# http://www.absoft.com/literature/osxuserguide.pdf\n# http://www.absoft.com/documentation.html\n\nimport os\nimport sys\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler, dummy_fortran_file\nfrom misc_util import cyg2win32\n\nclass AbsoftFCompiler(FCompiler):\n\n compiler_type = 'absoft'\n #version_pattern = r'FORTRAN 77 Compiler (?P[^\\s*,]*).*?Absoft Corp'\n version_pattern = r'(f90:.*?Absoft Pro FORTRAN Version|FORTRAN 77 Compiler)'+\\\n r' (?P[^\\s*,]*)(.*?Absoft Corp|)'\n\n # samt5735(8)$ f90 -V -c dummy.f\n # f90: Copyright Absoft Corporation 1994-2002; Absoft Pro FORTRAN Version 8.0\n # Note that fink installs g77 as f77, so need to use f90 for detection.\n\n executables = {\n 'version_cmd' : [\"f90\", \"-V -c %(fname)s.f -o %(fname)s.o\" \\\n % {'fname':cyg2win32(dummy_fortran_file())}],\n 'compiler_f77' : [\"f77\"],\n 'compiler_fix' : [\"f90\"],\n 'compiler_f90' : [\"f90\"],\n 'linker_so' : [\"f90\"],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"]\n }\n\n if os.name=='nt':\n library_switch = '/out:' #No space after /out:!\n\n module_dir_switch = None\n module_include_switch = '-p'\n\n def get_flags_linker_so(self):\n if 
os.name=='nt':\n opt = ['/dll']\n else:\n opt = [\"-K\",\"shared\"]\n return opt\n\n def library_dir_option(self, dir):\n if os.name=='nt':\n return ['-link','/PATH:\"%s\"' % (dir)]\n return \"-L\" + dir\n\n def library_option(self, lib):\n if os.name=='nt':\n return '%s.lib' % (lib)\n return \"-l\" + lib\n\n def get_library_dirs(self):\n opt = FCompiler.get_library_dirs(self)\n d = os.environ.get('ABSOFT')\n if d:\n opt.append(os.path.join(d,'LIB'))\n return opt\n\n def get_libraries(self):\n opt = FCompiler.get_libraries(self)\n opt.extend(['fio','f90math','fmath'])\n if os.name =='nt':\n opt.append('COMDLG32')\n return opt\n\n def get_flags(self):\n opt = FCompiler.get_flags(self)\n if os.name != 'nt':\n opt.extend(['-s'])\n if self.get_version():\n if self.get_version()>='8.2':\n opt.append('-fpic')\n return opt\n\n def get_flags_f77(self):\n opt = FCompiler.get_flags_f77(self)\n opt.extend(['-N22','-N90','-N110'])\n v = self.get_version()\n if os.name == 'nt':\n if v and v>='8.0':\n opt.extend(['-f','-N15'])\n else:\n opt.append('-f')\n if v:\n if v<='4.6':\n opt.append('-B108')\n else:\n # Though -N15 is undocumented, it works with\n # Absoft 8.0 on Linux\n opt.append('-N15')\n return opt\n\n def get_flags_f90(self):\n opt = FCompiler.get_flags_f90(self)\n opt.extend([\"-YCFRL=1\",\"-YCOM_NAMES=LCS\",\"-YCOM_PFX\",\"-YEXT_PFX\",\n \"-YCOM_SFX=_\",\"-YEXT_SFX=_\",\"-YEXT_NAMES=LCS\"])\n if self.get_version():\n if self.get_version()>'4.6':\n opt.extend([\"-YDEALLOC=ALL\"]) \n return opt\n\n def get_flags_fix(self):\n opt = FCompiler.get_flags_fix(self)\n opt.extend([\"-YCFRL=1\",\"-YCOM_NAMES=LCS\",\"-YCOM_PFX\",\"-YEXT_PFX\",\n \"-YCOM_SFX=_\",\"-YEXT_SFX=_\",\"-YEXT_NAMES=LCS\"])\n opt.extend([\"-f\",\"fixed\"])\n return opt\n\n def get_flags_opt(self):\n opt = ['-O']\n return opt\n\nif __name__ == '__main__':\n from distutils import log\n log.set_verbosity(2)\n from fcompiler import new_fcompiler\n compiler = new_fcompiler(compiler='absoft')\n 
compiler.customize()\n print compiler.get_version()\n", "methods": [ { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "absoftfcompiler.py", "nloc": 6, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 44, "end_line": 49, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "library_dir_option", "long_name": "library_dir_option( self , dir )", "filename": "absoftfcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self", "dir" ], "start_line": 51, "end_line": 54, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "library_option", "long_name": "library_option( self , lib )", "filename": "absoftfcompiler.py", "nloc": 4, "complexity": 2, "token_count": 24, "parameters": [ "self", "lib" ], "start_line": 56, "end_line": 59, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "get_library_dirs", "long_name": "get_library_dirs( self )", "filename": "absoftfcompiler.py", "nloc": 6, "complexity": 2, "token_count": 43, "parameters": [ "self" ], "start_line": 61, "end_line": 66, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "absoftfcompiler.py", "nloc": 6, "complexity": 2, "token_count": 40, "parameters": [ "self" ], "start_line": 68, "end_line": 73, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_flags", "long_name": "get_flags( self )", "filename": "absoftfcompiler.py", "nloc": 8, "complexity": 4, "token_count": 52, "parameters": [ "self" ], "start_line": 75, "end_line": 82, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 1 }, { "name": "get_flags_f77", "long_name": "get_flags_f77( self )", "filename": "absoftfcompiler.py", "nloc": 15, 
"complexity": 6, "token_count": 88, "parameters": [ "self" ], "start_line": 84, "end_line": 100, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "get_flags_f90", "long_name": "get_flags_f90( self )", "filename": "absoftfcompiler.py", "nloc": 8, "complexity": 3, "token_count": 59, "parameters": [ "self" ], "start_line": 102, "end_line": 109, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 1 }, { "name": "get_flags_fix", "long_name": "get_flags_fix( self )", "filename": "absoftfcompiler.py", "nloc": 6, "complexity": 1, "token_count": 45, "parameters": [ "self" ], "start_line": 111, "end_line": 116, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "absoftfcompiler.py", "nloc": 3, "complexity": 1, "token_count": 12, "parameters": [ "self" ], "start_line": 118, "end_line": 120, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 } ], "methods_before": [ { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "absoftfcompiler.py", "nloc": 6, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 40, "end_line": 45, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "library_dir_option", "long_name": "library_dir_option( self , dir )", "filename": "absoftfcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self", "dir" ], "start_line": 47, "end_line": 50, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "library_option", "long_name": "library_option( self , lib )", "filename": "absoftfcompiler.py", "nloc": 4, "complexity": 2, "token_count": 24, "parameters": [ "self", "lib" ], "start_line": 52, "end_line": 55, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 
4, "top_nesting_level": 1 }, { "name": "get_library_dirs", "long_name": "get_library_dirs( self )", "filename": "absoftfcompiler.py", "nloc": 6, "complexity": 2, "token_count": 43, "parameters": [ "self" ], "start_line": 57, "end_line": 62, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "absoftfcompiler.py", "nloc": 6, "complexity": 2, "token_count": 40, "parameters": [ "self" ], "start_line": 64, "end_line": 69, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_flags", "long_name": "get_flags( self )", "filename": "absoftfcompiler.py", "nloc": 8, "complexity": 4, "token_count": 52, "parameters": [ "self" ], "start_line": 71, "end_line": 78, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 1 }, { "name": "get_flags_f77", "long_name": "get_flags_f77( self )", "filename": "absoftfcompiler.py", "nloc": 15, "complexity": 6, "token_count": 88, "parameters": [ "self" ], "start_line": 80, "end_line": 96, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "get_flags_f90", "long_name": "get_flags_f90( self )", "filename": "absoftfcompiler.py", "nloc": 8, "complexity": 3, "token_count": 59, "parameters": [ "self" ], "start_line": 98, "end_line": 105, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 1 }, { "name": "get_flags_fix", "long_name": "get_flags_fix( self )", "filename": "absoftfcompiler.py", "nloc": 6, "complexity": 1, "token_count": 45, "parameters": [ "self" ], "start_line": 107, "end_line": 112, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "absoftfcompiler.py", "nloc": 3, "complexity": 1, "token_count": 12, "parameters": [ "self" ], "start_line": 114, "end_line": 
116, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 } ], "changed_methods": [], "nloc": 96, "complexity": 25, "token_count": 579, "diff_parsed": { "added": [ "# Notes:", "# - when using -g77 then use -DUNDERSCORE_G77 to compile f2py", "# generated extension modules (works for f2py v2.45.241_1936 and up)", "" ], "deleted": [] } } ] }, { "hash": "7aad76ae379cef2f3f12115fc080832af474dfc0", "msg": "Applied patch by Brent Leback from the PG.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-08T11:30:47+00:00", "author_timezone": 0, "committer_date": "2005-03-08T11:30:47+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "661570f96cdb7b12448d64b965edefed98f40436" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 1, "insertions": 1, "lines": 2, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "scipy_distutils/pgfcompiler.py", "new_path": "scipy_distutils/pgfcompiler.py", "filename": "pgfcompiler.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -17,7 +17,7 @@ class PGroupFCompiler(FCompiler):\n 'compiler_f77' : [\"pgf77\"],\n 'compiler_fix' : [\"pgf90\", \"-Mfixed\"],\n 'compiler_f90' : [\"pgf90\"],\n- 'linker_so' : [\"pgf90\",\"-shared\"],\n+ 'linker_so' : [\"pgf90\",\"-shared\",\"-fpic\"],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"]\n }\n", "added_lines": 1, "deleted_lines": 1, "source_code": "\n# http://www.pgroup.com\n\nimport os\nimport sys\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler\n\nclass PGroupFCompiler(FCompiler):\n\n compiler_type = 'pg'\n version_pattern = r'\\s*pg(f77|f90|hpf) (?P[\\d.-]+).*'\n\n executables = {\n 'version_cmd' : [\"pgf77\", \"-V 2>/dev/null\"],\n 
'compiler_f77' : [\"pgf77\"],\n 'compiler_fix' : [\"pgf90\", \"-Mfixed\"],\n 'compiler_f90' : [\"pgf90\"],\n 'linker_so' : [\"pgf90\",\"-shared\",\"-fpic\"],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"]\n }\n pic_flags = ['-fpic']\n module_dir_switch = '-module '\n module_include_switch = '-I'\n\n def get_flags(self):\n opt = ['-Minform=inform','-Mnosecond_underscore']\n return self.pic_flags + opt\n def get_flags_opt(self):\n return ['-fast']\n def get_flags_debug(self):\n return ['-g']\n\nif __name__ == '__main__':\n from distutils import log\n log.set_verbosity(2)\n from fcompiler import new_fcompiler\n compiler = new_fcompiler(compiler='pg')\n compiler.customize()\n print compiler.get_version()\n", "source_code_before": "\n# http://www.pgroup.com\n\nimport os\nimport sys\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler\n\nclass PGroupFCompiler(FCompiler):\n\n compiler_type = 'pg'\n version_pattern = r'\\s*pg(f77|f90|hpf) (?P[\\d.-]+).*'\n\n executables = {\n 'version_cmd' : [\"pgf77\", \"-V 2>/dev/null\"],\n 'compiler_f77' : [\"pgf77\"],\n 'compiler_fix' : [\"pgf90\", \"-Mfixed\"],\n 'compiler_f90' : [\"pgf90\"],\n 'linker_so' : [\"pgf90\",\"-shared\"],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"]\n }\n pic_flags = ['-fpic']\n module_dir_switch = '-module '\n module_include_switch = '-I'\n\n def get_flags(self):\n opt = ['-Minform=inform','-Mnosecond_underscore']\n return self.pic_flags + opt\n def get_flags_opt(self):\n return ['-fast']\n def get_flags_debug(self):\n return ['-g']\n\nif __name__ == '__main__':\n from distutils import log\n log.set_verbosity(2)\n from fcompiler import new_fcompiler\n compiler = new_fcompiler(compiler='pg')\n compiler.customize()\n print compiler.get_version()\n", "methods": [ { "name": "get_flags", "long_name": "get_flags( self )", "filename": "pgfcompiler.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "self" ], "start_line": 28, "end_line": 30, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "pgfcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 31, "end_line": 32, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "pgfcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 33, "end_line": 34, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 } ], "methods_before": [ { "name": "get_flags", "long_name": "get_flags( self )", "filename": "pgfcompiler.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "self" ], "start_line": 28, "end_line": 30, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "pgfcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 31, "end_line": 32, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "pgfcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 33, "end_line": 34, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 } ], "changed_methods": [], "nloc": 33, "complexity": 3, "token_count": 168, "diff_parsed": { "added": [ " 'linker_so' : [\"pgf90\",\"-shared\",\"-fpic\"]," ], "deleted": [ " 'linker_so' : [\"pgf90\",\"-shared\"]," ] } } ] }, { "hash": "3ac42a6d78dda727f25dc44d08b9c1539b502203", "msg": "Introduced --backends option for build_ext command, when used then also build_src must be specified.", "author": { "name": "Pearu Peterson", "email": 
"pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-08T22:36:59+00:00", "author_timezone": 0, "committer_date": "2005-03-08T22:36:59+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "7aad76ae379cef2f3f12115fc080832af474dfc0" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 3, "insertions": 116, "lines": 119, "files": 2, "dmm_unit_size": 0.27631578947368424, "dmm_unit_complexity": 0.8947368421052632, "dmm_unit_interfacing": 0.6578947368421053, "modified_files": [ { "old_path": "scipy_distutils/command/build_ext.py", "new_path": "scipy_distutils/command/build_ext.py", "filename": "build_ext.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -24,17 +24,22 @@ class build_ext (old_build_ext):\n user_options = old_build_ext.user_options + [\n ('fcompiler=', None,\n \"specify the Fortran compiler type\"),\n+ ('backends=', None,\n+ \"specify the array backends (numeric,numarray,..) 
as a comma separated list\"),\n ]\n \n def initialize_options(self):\n old_build_ext.initialize_options(self)\n self.fcompiler = None\n+ self.backends = None\n return\n \n def finalize_options(self):\n old_build_ext.finalize_options(self)\n self.set_undefined_options('config_fc',\n ('fcompiler', 'fcompiler'))\n+ if self.backends is None:\n+ self.backends = None\n return\n \n def run(self):\n@@ -163,11 +168,17 @@ def build_extension(self, ext):\n else:\n kws = {}\n \n+ backend = getattr(ext,'backend',None)\n+ if backend is not None:\n+ output_dir = os.path.join(self.build_temp,'_'+backend)\n+ else:\n+ output_dir = self.build_temp\n+ \n c_objects = []\n if c_sources:\n log.info(\"compiling C sources\")\n c_objects = self.compiler.compile(c_sources,\n- output_dir=self.build_temp,\n+ output_dir=output_dir,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n@@ -180,7 +191,7 @@ def build_extension(self, ext):\n self.compiler.compiler_so[0] = self.compiler.compiler_cxx[0]\n \n c_objects += self.compiler.compile(cxx_sources,\n- output_dir=self.build_temp,\n+ output_dir=output_dir,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n", "added_lines": 13, "deleted_lines": 2, "source_code": "\"\"\" Modified version of build_ext that handles fortran source files.\n\"\"\"\n\nimport os\nimport string\nimport sys\nfrom glob import glob\nfrom types import *\n\nfrom distutils.dep_util import newer_group, newer\nfrom distutils.command.build_ext import build_ext as old_build_ext\n\nfrom scipy_distutils.command.build_clib import get_headers,get_directories\nfrom scipy_distutils import misc_util, log\nfrom scipy_distutils.misc_util import filter_sources, has_f_sources, \\\n has_cxx_sources\nfrom distutils.errors import DistutilsFileError\n\n\nclass build_ext (old_build_ext):\n\n description = \"build C/C++/F extensions (compile/link to build directory)\"\n\n user_options = old_build_ext.user_options + [\n ('fcompiler=', None,\n \"specify the 
Fortran compiler type\"),\n ('backends=', None,\n \"specify the array backends (numeric,numarray,..) as a comma separated list\"),\n ]\n\n def initialize_options(self):\n old_build_ext.initialize_options(self)\n self.fcompiler = None\n self.backends = None\n return\n\n def finalize_options(self):\n old_build_ext.finalize_options(self)\n self.set_undefined_options('config_fc',\n ('fcompiler', 'fcompiler'))\n if self.backends is None:\n self.backends = None\n return\n\n def run(self):\n if not self.extensions:\n return\n\n # Make sure that extension sources are complete.\n for ext in self.extensions:\n if not misc_util.all_strings(ext.sources):\n raise TypeError,'Extension \"%s\" sources contains unresolved'\\\n ' items (call build_src before build_ext).' % (ext.name)\n\n if self.distribution.has_c_libraries():\n build_clib = self.get_finalized_command('build_clib')\n self.library_dirs.append(build_clib.build_clib)\n else:\n build_clib = None\n\n # Not including C libraries to the list of\n # extension libraries automatically to prevent\n # bogus linking commands. 
Extensions must\n # explicitly specify the C libraries that they use.\n\n # Determine if Fortran compiler is needed.\n if build_clib and build_clib.fcompiler is not None:\n need_f_compiler = 1\n else:\n need_f_compiler = 0\n for ext in self.extensions:\n if has_f_sources(ext.sources):\n need_f_compiler = 1\n break\n if getattr(ext,'language','c') in ['f77','f90']:\n need_f_compiler = 1\n break\n\n # Determine if C++ compiler is needed.\n need_cxx_compiler = 0\n for ext in self.extensions:\n if has_cxx_sources(ext.sources):\n need_cxx_compiler = 1\n break\n if getattr(ext,'language','c')=='c++':\n need_cxx_compiler = 1\n break\n\n from distutils.ccompiler import new_compiler\n self.compiler = new_compiler(compiler=self.compiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.compiler.customize(self.distribution,need_cxx=need_cxx_compiler)\n self.compiler.customize_cmd(self)\n self.compiler.show_customization()\n \n # Initialize Fortran/C++ compilers if needed.\n if need_f_compiler:\n from scipy_distutils.fcompiler import new_fcompiler\n self.fcompiler = new_fcompiler(compiler=self.fcompiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.fcompiler.customize(self.distribution)\n self.fcompiler.customize_cmd(self)\n self.fcompiler.show_customization()\n\n # Build extensions\n self.build_extensions()\n return\n\n def swig_sources(self, sources):\n # Do nothing. 
Swig sources have beed handled in build_src command.\n return sources\n\n def build_extension(self, ext):\n sources = ext.sources\n if sources is None or type(sources) not in (ListType, TupleType):\n raise DistutilsSetupError, \\\n (\"in 'ext_modules' option (extension '%s'), \" +\n \"'sources' must be present and must be \" +\n \"a list of source filenames\") % ext.name\n sources = list(sources)\n\n if not sources:\n return\n\n fullname = self.get_ext_fullname(ext.name)\n if self.inplace:\n modpath = string.split(fullname, '.')\n package = string.join(modpath[0:-1], '.')\n base = modpath[-1]\n\n build_py = self.get_finalized_command('build_py')\n package_dir = build_py.get_package_dir(package)\n ext_filename = os.path.join(package_dir,\n self.get_ext_filename(base))\n else:\n ext_filename = os.path.join(self.build_lib,\n self.get_ext_filename(fullname))\n depends = sources + ext.depends\n\n if not (self.force or newer_group(depends, ext_filename, 'newer')):\n log.debug(\"skipping '%s' extension (up-to-date)\", ext.name)\n return\n else:\n log.info(\"building '%s' extension\", ext.name)\n\n extra_args = ext.extra_compile_args or []\n macros = ext.define_macros[:]\n for undef in ext.undef_macros:\n macros.append((undef,))\n\n c_sources, cxx_sources, f_sources, fmodule_sources = \\\n filter_sources(ext.sources)\n if self.compiler.compiler_type=='msvc':\n if cxx_sources:\n # Needed to compile kiva.agg._agg extension.\n extra_args.append('/Zm1000')\n # this hack works around the msvc compiler attributes\n # problem, msvc uses its own convention :(\n c_sources += cxx_sources\n cxx_sources = []\n\n if sys.version[:3]>='2.3':\n kws = {'depends':ext.depends}\n else:\n kws = {}\n\n backend = getattr(ext,'backend',None)\n if backend is not None:\n output_dir = os.path.join(self.build_temp,'_'+backend)\n else:\n output_dir = self.build_temp\n \n c_objects = []\n if c_sources:\n log.info(\"compiling C sources\")\n c_objects = self.compiler.compile(c_sources,\n 
output_dir=output_dir,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n if cxx_sources:\n log.info(\"compiling C++ sources\")\n\n old_compiler = self.compiler.compiler_so[0]\n self.compiler.compiler_so[0] = self.compiler.compiler_cxx[0]\n\n c_objects += self.compiler.compile(cxx_sources,\n output_dir=output_dir,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n self.compiler.compiler_so[0] = old_compiler\n\n check_for_f90_modules = not not fmodule_sources\n\n if f_sources or fmodule_sources:\n extra_postargs = []\n include_dirs = ext.include_dirs[:]\n module_dirs = ext.module_dirs[:]\n\n #if self.fcompiler.compiler_type=='ibm':\n macros = []\n\n if check_for_f90_modules:\n module_build_dir = os.path.join(\\\n self.build_temp,os.path.dirname(\\\n self.get_ext_filename(fullname)))\n\n self.mkpath(module_build_dir)\n if self.fcompiler.module_dir_switch is None:\n existing_modules = glob('*.mod')\n extra_postargs += self.fcompiler.module_options(\\\n module_dirs,module_build_dir)\n\n f_objects = []\n if fmodule_sources:\n log.info(\"compiling Fortran 90 module sources\")\n f_objects = self.fcompiler.compile(fmodule_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n\n if check_for_f90_modules \\\n and self.fcompiler.module_dir_switch is None:\n for f in glob('*.mod'):\n if f in existing_modules:\n continue\n try:\n self.move_file(f, module_build_dir)\n except DistutilsFileError: # already exists in destination\n os.remove(f)\n \n if f_sources:\n log.info(\"compiling Fortran sources\")\n f_objects += self.fcompiler.compile(f_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n else:\n f_objects = []\n\n objects = c_objects + 
f_objects\n\n if ext.extra_objects:\n objects.extend(ext.extra_objects)\n extra_args = ext.extra_link_args or []\n\n try:\n old_linker_so_0 = self.compiler.linker_so[0]\n except:\n pass\n \n use_fortran_linker = getattr(ext,'language','c') in ['f77','f90']\n c_libraries = []\n c_library_dirs = []\n if use_fortran_linker or f_sources:\n use_fortran_linker = 1\n elif self.distribution.has_c_libraries(): \n build_clib = self.get_finalized_command('build_clib')\n f_libs = []\n for (lib_name, build_info) in build_clib.libraries:\n if has_f_sources(build_info.get('sources',[])):\n f_libs.append(lib_name)\n if lib_name in ext.libraries:\n # XXX: how to determine if c_libraries contain\n # fortran compiled sources?\n c_libraries.extend(build_info.get('libraries',[]))\n c_library_dirs.extend(build_info.get('library_dirs',[]))\n for l in ext.libraries:\n if l in f_libs:\n use_fortran_linker = 1\n break\n\n # Always use system linker when using MSVC compiler.\n if self.compiler.compiler_type=='msvc' and use_fortran_linker:\n c_libraries.extend(self.fcompiler.libraries)\n c_library_dirs.extend(self.fcompiler.library_dirs)\n use_fortran_linker = 0\n\n if use_fortran_linker:\n if cxx_sources:\n # XXX: Which linker should be used, Fortran or C++?\n log.warn('mixing Fortran and C++ is untested')\n link = self.fcompiler.link_shared_object\n language = ext.language or self.fcompiler.detect_language(f_sources)\n else:\n link = self.compiler.link_shared_object\n if sys.version[:3]>='2.3':\n language = ext.language or self.compiler.detect_language(sources)\n else:\n language = ext.language\n if cxx_sources:\n self.compiler.linker_so[0] = self.compiler.compiler_cxx[0]\n\n if sys.version[:3]>='2.3':\n kws = {'target_lang':language}\n else:\n kws = {}\n\n link(objects, ext_filename,\n libraries=self.get_libraries(ext) + c_libraries,\n library_dirs=ext.library_dirs + c_library_dirs,\n runtime_library_dirs=ext.runtime_library_dirs,\n extra_postargs=extra_args,\n 
export_symbols=self.get_export_symbols(ext),\n debug=self.debug,\n build_temp=self.build_temp,**kws)\n\n try:\n self.compiler.linker_so[0] = old_linker_so_0\n except:\n pass\n\n return\n\n def get_source_files (self):\n self.check_extensions_list(self.extensions)\n filenames = []\n def visit_func(filenames,dirname,names):\n if os.path.basename(dirname) in ['CVS','.svn']:\n names[:] = []\n return\n for name in names:\n if name[-1] in \"~#\":\n continue\n fullname = os.path.join(dirname,name)\n if os.path.isfile(fullname):\n filenames.append(fullname)\n # Get sources and any include files in the same directory.\n for ext in self.extensions:\n sources = filter(lambda s:type(s) is StringType,ext.sources)\n filenames.extend(sources)\n filenames.extend(get_headers(get_directories(sources)))\n for d in ext.depends:\n if is_local_src_dir(d):\n os.path.walk(d,visit_func,filenames)\n elif os.path.isfile(d):\n filenames.append(d)\n return filenames\n\n def get_outputs (self):\n self.check_extensions_list(self.extensions)\n\n outputs = []\n for ext in self.extensions:\n if not ext.sources:\n continue\n fullname = self.get_ext_fullname(ext.name)\n outputs.append(os.path.join(self.build_lib,\n self.get_ext_filename(fullname)))\n return outputs\n\ndef is_local_src_dir(directory):\n \"\"\" Return true if directory is local directory.\n \"\"\"\n abs_dir = os.path.abspath(directory)\n c = os.path.commonprefix([os.getcwd(),abs_dir])\n new_dir = abs_dir[len(c):].split(os.sep)\n if new_dir and not new_dir[0]:\n new_dir = new_dir[1:]\n if new_dir and new_dir[0]=='build':\n return 0\n new_dir = os.sep.join(new_dir)\n return os.path.isdir(new_dir)\n", "source_code_before": "\"\"\" Modified version of build_ext that handles fortran source files.\n\"\"\"\n\nimport os\nimport string\nimport sys\nfrom glob import glob\nfrom types import *\n\nfrom distutils.dep_util import newer_group, newer\nfrom distutils.command.build_ext import build_ext as old_build_ext\n\nfrom 
scipy_distutils.command.build_clib import get_headers,get_directories\nfrom scipy_distutils import misc_util, log\nfrom scipy_distutils.misc_util import filter_sources, has_f_sources, \\\n has_cxx_sources\nfrom distutils.errors import DistutilsFileError\n\n\nclass build_ext (old_build_ext):\n\n description = \"build C/C++/F extensions (compile/link to build directory)\"\n\n user_options = old_build_ext.user_options + [\n ('fcompiler=', None,\n \"specify the Fortran compiler type\"),\n ]\n\n def initialize_options(self):\n old_build_ext.initialize_options(self)\n self.fcompiler = None\n return\n\n def finalize_options(self):\n old_build_ext.finalize_options(self)\n self.set_undefined_options('config_fc',\n ('fcompiler', 'fcompiler'))\n return\n\n def run(self):\n if not self.extensions:\n return\n\n # Make sure that extension sources are complete.\n for ext in self.extensions:\n if not misc_util.all_strings(ext.sources):\n raise TypeError,'Extension \"%s\" sources contains unresolved'\\\n ' items (call build_src before build_ext).' % (ext.name)\n\n if self.distribution.has_c_libraries():\n build_clib = self.get_finalized_command('build_clib')\n self.library_dirs.append(build_clib.build_clib)\n else:\n build_clib = None\n\n # Not including C libraries to the list of\n # extension libraries automatically to prevent\n # bogus linking commands. 
Extensions must\n # explicitly specify the C libraries that they use.\n\n # Determine if Fortran compiler is needed.\n if build_clib and build_clib.fcompiler is not None:\n need_f_compiler = 1\n else:\n need_f_compiler = 0\n for ext in self.extensions:\n if has_f_sources(ext.sources):\n need_f_compiler = 1\n break\n if getattr(ext,'language','c') in ['f77','f90']:\n need_f_compiler = 1\n break\n\n # Determine if C++ compiler is needed.\n need_cxx_compiler = 0\n for ext in self.extensions:\n if has_cxx_sources(ext.sources):\n need_cxx_compiler = 1\n break\n if getattr(ext,'language','c')=='c++':\n need_cxx_compiler = 1\n break\n\n from distutils.ccompiler import new_compiler\n self.compiler = new_compiler(compiler=self.compiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.compiler.customize(self.distribution,need_cxx=need_cxx_compiler)\n self.compiler.customize_cmd(self)\n self.compiler.show_customization()\n \n # Initialize Fortran/C++ compilers if needed.\n if need_f_compiler:\n from scipy_distutils.fcompiler import new_fcompiler\n self.fcompiler = new_fcompiler(compiler=self.fcompiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.fcompiler.customize(self.distribution)\n self.fcompiler.customize_cmd(self)\n self.fcompiler.show_customization()\n\n # Build extensions\n self.build_extensions()\n return\n\n def swig_sources(self, sources):\n # Do nothing. 
Swig sources have beed handled in build_src command.\n return sources\n\n def build_extension(self, ext):\n sources = ext.sources\n if sources is None or type(sources) not in (ListType, TupleType):\n raise DistutilsSetupError, \\\n (\"in 'ext_modules' option (extension '%s'), \" +\n \"'sources' must be present and must be \" +\n \"a list of source filenames\") % ext.name\n sources = list(sources)\n\n if not sources:\n return\n\n fullname = self.get_ext_fullname(ext.name)\n if self.inplace:\n modpath = string.split(fullname, '.')\n package = string.join(modpath[0:-1], '.')\n base = modpath[-1]\n\n build_py = self.get_finalized_command('build_py')\n package_dir = build_py.get_package_dir(package)\n ext_filename = os.path.join(package_dir,\n self.get_ext_filename(base))\n else:\n ext_filename = os.path.join(self.build_lib,\n self.get_ext_filename(fullname))\n depends = sources + ext.depends\n\n if not (self.force or newer_group(depends, ext_filename, 'newer')):\n log.debug(\"skipping '%s' extension (up-to-date)\", ext.name)\n return\n else:\n log.info(\"building '%s' extension\", ext.name)\n\n extra_args = ext.extra_compile_args or []\n macros = ext.define_macros[:]\n for undef in ext.undef_macros:\n macros.append((undef,))\n\n c_sources, cxx_sources, f_sources, fmodule_sources = \\\n filter_sources(ext.sources)\n if self.compiler.compiler_type=='msvc':\n if cxx_sources:\n # Needed to compile kiva.agg._agg extension.\n extra_args.append('/Zm1000')\n # this hack works around the msvc compiler attributes\n # problem, msvc uses its own convention :(\n c_sources += cxx_sources\n cxx_sources = []\n\n if sys.version[:3]>='2.3':\n kws = {'depends':ext.depends}\n else:\n kws = {}\n\n c_objects = []\n if c_sources:\n log.info(\"compiling C sources\")\n c_objects = self.compiler.compile(c_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n if cxx_sources:\n log.info(\"compiling C++ 
sources\")\n\n old_compiler = self.compiler.compiler_so[0]\n self.compiler.compiler_so[0] = self.compiler.compiler_cxx[0]\n\n c_objects += self.compiler.compile(cxx_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n self.compiler.compiler_so[0] = old_compiler\n\n check_for_f90_modules = not not fmodule_sources\n\n if f_sources or fmodule_sources:\n extra_postargs = []\n include_dirs = ext.include_dirs[:]\n module_dirs = ext.module_dirs[:]\n\n #if self.fcompiler.compiler_type=='ibm':\n macros = []\n\n if check_for_f90_modules:\n module_build_dir = os.path.join(\\\n self.build_temp,os.path.dirname(\\\n self.get_ext_filename(fullname)))\n\n self.mkpath(module_build_dir)\n if self.fcompiler.module_dir_switch is None:\n existing_modules = glob('*.mod')\n extra_postargs += self.fcompiler.module_options(\\\n module_dirs,module_build_dir)\n\n f_objects = []\n if fmodule_sources:\n log.info(\"compiling Fortran 90 module sources\")\n f_objects = self.fcompiler.compile(fmodule_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n\n if check_for_f90_modules \\\n and self.fcompiler.module_dir_switch is None:\n for f in glob('*.mod'):\n if f in existing_modules:\n continue\n try:\n self.move_file(f, module_build_dir)\n except DistutilsFileError: # already exists in destination\n os.remove(f)\n \n if f_sources:\n log.info(\"compiling Fortran sources\")\n f_objects += self.fcompiler.compile(f_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n else:\n f_objects = []\n\n objects = c_objects + f_objects\n\n if ext.extra_objects:\n objects.extend(ext.extra_objects)\n extra_args = ext.extra_link_args or []\n\n try:\n old_linker_so_0 = self.compiler.linker_so[0]\n except:\n 
pass\n \n use_fortran_linker = getattr(ext,'language','c') in ['f77','f90']\n c_libraries = []\n c_library_dirs = []\n if use_fortran_linker or f_sources:\n use_fortran_linker = 1\n elif self.distribution.has_c_libraries(): \n build_clib = self.get_finalized_command('build_clib')\n f_libs = []\n for (lib_name, build_info) in build_clib.libraries:\n if has_f_sources(build_info.get('sources',[])):\n f_libs.append(lib_name)\n if lib_name in ext.libraries:\n # XXX: how to determine if c_libraries contain\n # fortran compiled sources?\n c_libraries.extend(build_info.get('libraries',[]))\n c_library_dirs.extend(build_info.get('library_dirs',[]))\n for l in ext.libraries:\n if l in f_libs:\n use_fortran_linker = 1\n break\n\n # Always use system linker when using MSVC compiler.\n if self.compiler.compiler_type=='msvc' and use_fortran_linker:\n c_libraries.extend(self.fcompiler.libraries)\n c_library_dirs.extend(self.fcompiler.library_dirs)\n use_fortran_linker = 0\n\n if use_fortran_linker:\n if cxx_sources:\n # XXX: Which linker should be used, Fortran or C++?\n log.warn('mixing Fortran and C++ is untested')\n link = self.fcompiler.link_shared_object\n language = ext.language or self.fcompiler.detect_language(f_sources)\n else:\n link = self.compiler.link_shared_object\n if sys.version[:3]>='2.3':\n language = ext.language or self.compiler.detect_language(sources)\n else:\n language = ext.language\n if cxx_sources:\n self.compiler.linker_so[0] = self.compiler.compiler_cxx[0]\n\n if sys.version[:3]>='2.3':\n kws = {'target_lang':language}\n else:\n kws = {}\n\n link(objects, ext_filename,\n libraries=self.get_libraries(ext) + c_libraries,\n library_dirs=ext.library_dirs + c_library_dirs,\n runtime_library_dirs=ext.runtime_library_dirs,\n extra_postargs=extra_args,\n export_symbols=self.get_export_symbols(ext),\n debug=self.debug,\n build_temp=self.build_temp,**kws)\n\n try:\n self.compiler.linker_so[0] = old_linker_so_0\n except:\n pass\n\n return\n\n def get_source_files 
(self):\n self.check_extensions_list(self.extensions)\n filenames = []\n def visit_func(filenames,dirname,names):\n if os.path.basename(dirname) in ['CVS','.svn']:\n names[:] = []\n return\n for name in names:\n if name[-1] in \"~#\":\n continue\n fullname = os.path.join(dirname,name)\n if os.path.isfile(fullname):\n filenames.append(fullname)\n # Get sources and any include files in the same directory.\n for ext in self.extensions:\n sources = filter(lambda s:type(s) is StringType,ext.sources)\n filenames.extend(sources)\n filenames.extend(get_headers(get_directories(sources)))\n for d in ext.depends:\n if is_local_src_dir(d):\n os.path.walk(d,visit_func,filenames)\n elif os.path.isfile(d):\n filenames.append(d)\n return filenames\n\n def get_outputs (self):\n self.check_extensions_list(self.extensions)\n\n outputs = []\n for ext in self.extensions:\n if not ext.sources:\n continue\n fullname = self.get_ext_fullname(ext.name)\n outputs.append(os.path.join(self.build_lib,\n self.get_ext_filename(fullname)))\n return outputs\n\ndef is_local_src_dir(directory):\n \"\"\" Return true if directory is local directory.\n \"\"\"\n abs_dir = os.path.abspath(directory)\n c = os.path.commonprefix([os.getcwd(),abs_dir])\n new_dir = abs_dir[len(c):].split(os.sep)\n if new_dir and not new_dir[0]:\n new_dir = new_dir[1:]\n if new_dir and new_dir[0]=='build':\n return 0\n new_dir = os.sep.join(new_dir)\n return os.path.isdir(new_dir)\n", "methods": [ { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_ext.py", "nloc": 5, "complexity": 1, "token_count": 22, "parameters": [ "self" ], "start_line": 31, "end_line": 35, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_ext.py", "nloc": 7, "complexity": 2, "token_count": 36, "parameters": [ "self" ], "start_line": 37, "end_line": 43, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_ext.py", "nloc": 50, "complexity": 14, "token_count": 304, "parameters": [ "self" ], "start_line": 45, "end_line": 111, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 67, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources )", "filename": "build_ext.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self", "sources" ], "start_line": 113, "end_line": 115, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "build_extension", "long_name": "build_extension( self , ext )", "filename": "build_ext.py", "nloc": 175, "complexity": 47, "token_count": 1129, "parameters": [ "self", "ext" ], "start_line": 117, "end_line": 328, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 212, "top_nesting_level": 1 }, { "name": "get_source_files.visit_func", "long_name": "get_source_files.visit_func( filenames , dirname , names )", "filename": "build_ext.py", "nloc": 10, "complexity": 5, "token_count": 76, "parameters": [ "filenames", "dirname", "names" ], "start_line": 333, "end_line": 342, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 2 }, { "name": "get_source_files", "long_name": "get_source_files( self )", "filename": "build_ext.py", "nloc": 14, "complexity": 5, "token_count": 105, "parameters": [ "self" ], "start_line": 330, "end_line": 353, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 24, "top_nesting_level": 1 }, { "name": "get_outputs", "long_name": "get_outputs( self )", "filename": "build_ext.py", "nloc": 10, "complexity": 3, "token_count": 65, "parameters": [ "self" ], "start_line": 355, "end_line": 365, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "is_local_src_dir", "long_name": "is_local_src_dir( directory )", "filename": 
"build_ext.py", "nloc": 10, "complexity": 5, "token_count": 98, "parameters": [ "directory" ], "start_line": 367, "end_line": 378, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 } ], "methods_before": [ { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_ext.py", "nloc": 4, "complexity": 1, "token_count": 17, "parameters": [ "self" ], "start_line": 29, "end_line": 32, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_ext.py", "nloc": 5, "complexity": 1, "token_count": 24, "parameters": [ "self" ], "start_line": 34, "end_line": 38, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_ext.py", "nloc": 50, "complexity": 14, "token_count": 304, "parameters": [ "self" ], "start_line": 40, "end_line": 106, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 67, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources )", "filename": "build_ext.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self", "sources" ], "start_line": 108, "end_line": 110, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "build_extension", "long_name": "build_extension( self , ext )", "filename": "build_ext.py", "nloc": 170, "complexity": 46, "token_count": 1094, "parameters": [ "self", "ext" ], "start_line": 112, "end_line": 317, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 206, "top_nesting_level": 1 }, { "name": "get_source_files.visit_func", "long_name": "get_source_files.visit_func( filenames , dirname , names )", "filename": "build_ext.py", "nloc": 10, "complexity": 5, "token_count": 76, "parameters": [ "filenames", "dirname", "names" ], "start_line": 322, 
"end_line": 331, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 2 }, { "name": "get_source_files", "long_name": "get_source_files( self )", "filename": "build_ext.py", "nloc": 14, "complexity": 5, "token_count": 105, "parameters": [ "self" ], "start_line": 319, "end_line": 342, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 24, "top_nesting_level": 1 }, { "name": "get_outputs", "long_name": "get_outputs( self )", "filename": "build_ext.py", "nloc": 10, "complexity": 3, "token_count": 65, "parameters": [ "self" ], "start_line": 344, "end_line": 354, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "is_local_src_dir", "long_name": "is_local_src_dir( directory )", "filename": "build_ext.py", "nloc": 10, "complexity": 5, "token_count": 98, "parameters": [ "directory" ], "start_line": 356, "end_line": 367, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "build_extension", "long_name": "build_extension( self , ext )", "filename": "build_ext.py", "nloc": 175, "complexity": 47, "token_count": 1129, "parameters": [ "self", "ext" ], "start_line": 117, "end_line": 328, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 212, "top_nesting_level": 1 }, { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_ext.py", "nloc": 5, "complexity": 1, "token_count": 22, "parameters": [ "self" ], "start_line": 31, "end_line": 35, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_ext.py", "nloc": 7, "complexity": 2, "token_count": 36, "parameters": [ "self" ], "start_line": 37, "end_line": 43, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 } ], "nloc": 304, "complexity": 83, "token_count": 1950, "diff_parsed": 
{ "added": [ " ('backends=', None,", " \"specify the array backends (numeric,numarray,..) as a comma separated list\"),", " self.backends = None", " if self.backends is None:", " self.backends = None", " backend = getattr(ext,'backend',None)", " if backend is not None:", " output_dir = os.path.join(self.build_temp,'_'+backend)", " else:", " output_dir = self.build_temp", "", " output_dir=output_dir,", " output_dir=output_dir," ], "deleted": [ " output_dir=self.build_temp,", " output_dir=self.build_temp," ] } }, { "old_path": "scipy_distutils/command/build_src.py", "new_path": "scipy_distutils/command/build_src.py", "filename": "build_src.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -10,8 +10,40 @@\n from distutils.dep_util import newer_group, newer\n \n from scipy_distutils import log\n-from scipy_distutils.misc_util import fortran_ext_match, all_strings\n+from scipy_distutils.misc_util import fortran_ext_match, all_strings, dot_join\n from scipy_distutils.from_template import process_file\n+from scipy_distutils.extension import Extension\n+\n+_split_ext_template = '''\n+import os\n+import sys\n+\n+_which = None, None\n+_backends = %(backends)r\n+if hasattr(sys, \"argv\"):\n+ i = -1\n+ for a in sys.argv:\n+ i += 1\n+ if a.lower()[2:] in _backends: \n+ _which = a.lower()[2:], \"command line\"\n+ del sys.argv[i]\n+ os.environ[_which[0].upper()] = _which[0]\n+ break\n+ del a\n+\n+if _which[0] is None:\n+ for b in _backends:\n+ if os.environ.get(b.upper(),None):\n+ _which = b, \"environment var\"\n+ break\n+ del b\n+\n+if _which[0] is None:\n+ _which = _backends[0], \"defaulted\"\n+\n+exec \"from _\" + _which[0] + \".%(name)s import *\"\n+'''\n+\n \n class build_src(build_ext.build_ext):\n \n@@ -43,6 +75,7 @@ def initialize_options(self):\n self.package_dir = None\n self.f2pyflags = None\n self.swigflags = None\n+ self.backends = None\n return\n \n def finalize_options(self):\n@@ -60,6 +93,9 @@ def finalize_options(self):\n if self.inplace is None:\n 
build_ext = self.get_finalized_command('build_ext')\n self.inplace = build_ext.inplace\n+ if self.backends is None:\n+ build_ext = self.get_finalized_command('build_ext')\n+ self.backends = build_ext.backends\n \n # py_modules is used in build_py.find_package_modules\n self.py_modules = {}\n@@ -78,9 +114,75 @@ def finalize_options(self):\n def run(self):\n if not (self.extensions or self.libraries):\n return\n+ if self.backends is not None:\n+ self.backend_split()\n self.build_sources()\n return\n \n+ def backend_split(self):\n+ log.info('splitting extensions for backends: %s' % (self.backends))\n+ extensions = []\n+ backends = self.backends.split(',')\n+ for ext in self.extensions:\n+ name = ext.name.split('.')[-1]\n+ fullname = self.get_ext_fullname(ext.name)\n+ def func(extension, src_dir):\n+ source = os.path.join(os.path.dirname(src_dir),name+'.py')\n+ if newer(__file__, source):\n+ f = open(source,'w')\n+ f.write(_split_ext_template \\\n+ % {'name':name,'fullname':fullname,\n+ 'backends':backends})\n+ f.close()\n+ return [ source ]\n+ def func_init(extension, src_dir):\n+ source = os.path.join(src_dir,'__init__.py')\n+ if newer(__file__, source):\n+ f = open(source,'w')\n+ f.close()\n+ return [source]\n+ for b in backends:\n+ new_ext = self.split_extension(ext,b)\n+ new_ext.sources.append(func_init)\n+ extensions.append(new_ext)\n+\n+ new_package = dot_join(*(ext.name.split('.')[:-1]+['_'+b]))\n+ new_package_dir = os.path.join(*([self.build_src]+ext.name.split('.')[:-1]+['_'+b]))\n+ if new_package not in self.distribution.packages:\n+ self.distribution.packages.append(new_package)\n+ self.distribution.package_dir[new_package] = new_package_dir\n+\n+ ext.sources = [func]\n+ extensions.append(ext)\n+ self.extensions[:] = extensions\n+\n+ def split_extension(self, ext, backend):\n+ fullname = self.get_ext_fullname(ext.name)\n+ modpath = fullname.split('.')\n+ package = '.'.join(modpath[0:-1])\n+ name = modpath[-1]\n+ macros = []\n+ 
macros.append((backend.upper(),None))\n+ new_ext = Extension(name = dot_join(package,'_%s.%s' % (backend,name)),\n+ sources = ext.sources,\n+ include_dirs = ext.include_dirs,\n+ define_macros = ext.define_macros + macros,\n+ undef_macros = ext.undef_macros,\n+ library_dirs = ext.library_dirs,\n+ libraries = ext.libraries,\n+ runtime_library_dirs = ext.runtime_library_dirs,\n+ extra_objects = ext.extra_objects,\n+ extra_compile_args = ext.extra_compile_args,\n+ extra_link_args = ext.extra_link_args,\n+ export_symbols = ext.export_symbols,\n+ depends = ext.depends,\n+ language = ext.language,\n+ f2py_options = ext.f2py_options,\n+ module_dirs = ext.module_dirs\n+ )\n+ new_ext.backend = backend\n+ return new_ext\n+ \n def build_sources(self):\n self.check_extensions_list(self.extensions)\n \n", "added_lines": 103, "deleted_lines": 1, "source_code": "\"\"\" Build swig, f2py, weave, sources.\n\"\"\"\n\nimport os\nimport re\n\nfrom distutils.cmd import Command\nfrom distutils.command import build_ext, build_py\nfrom distutils.util import convert_path\nfrom distutils.dep_util import newer_group, newer\n\nfrom scipy_distutils import log\nfrom scipy_distutils.misc_util import fortran_ext_match, all_strings, dot_join\nfrom scipy_distutils.from_template import process_file\nfrom scipy_distutils.extension import Extension\n\n_split_ext_template = '''\nimport os\nimport sys\n\n_which = None, None\n_backends = %(backends)r\nif hasattr(sys, \"argv\"):\n i = -1\n for a in sys.argv:\n i += 1\n if a.lower()[2:] in _backends: \n _which = a.lower()[2:], \"command line\"\n del sys.argv[i]\n os.environ[_which[0].upper()] = _which[0]\n break\n del a\n\nif _which[0] is None:\n for b in _backends:\n if os.environ.get(b.upper(),None):\n _which = b, \"environment var\"\n break\n del b\n\nif _which[0] is None:\n _which = _backends[0], \"defaulted\"\n\nexec \"from _\" + _which[0] + \".%(name)s import *\"\n'''\n\n\nclass build_src(build_ext.build_ext):\n\n description = \"build sources from 
SWIG, F2PY files or a function\"\n\n user_options = [\n ('build-src=', 'd', \"directory to \\\"build\\\" sources to\"),\n ('f2pyflags=', None, \"additonal flags to f2py\"),\n ('swigflags=', None, \"additional flags to swig\"),\n ('force', 'f', \"forcibly build everything (ignore file timestamps)\"),\n ('inplace', 'i',\n \"ignore build-lib and put compiled extensions into the source \" +\n \"directory alongside your pure Python modules\"),\n ]\n\n boolean_options = ['force','inplace']\n\n help_options = []\n\n def initialize_options(self):\n self.extensions = None\n self.package = None\n self.py_modules = None\n self.build_src = None\n self.build_lib = None\n self.build_base = None\n self.force = None\n self.inplace = None\n self.package_dir = None\n self.f2pyflags = None\n self.swigflags = None\n self.backends = None\n return\n\n def finalize_options(self):\n self.set_undefined_options('build',\n ('build_base', 'build_base'),\n ('build_lib', 'build_lib'),\n ('force', 'force'))\n if self.package is None:\n self.package = self.distribution.ext_package\n self.extensions = self.distribution.ext_modules\n self.libraries = self.distribution.libraries or []\n self.py_modules = self.distribution.py_modules\n if self.build_src is None:\n self.build_src = os.path.join(self.build_base, 'src')\n if self.inplace is None:\n build_ext = self.get_finalized_command('build_ext')\n self.inplace = build_ext.inplace\n if self.backends is None:\n build_ext = self.get_finalized_command('build_ext')\n self.backends = build_ext.backends\n\n # py_modules is used in build_py.find_package_modules\n self.py_modules = {}\n\n if self.f2pyflags is None:\n self.f2pyflags = []\n else:\n self.f2pyflags = self.f2pyflags.split() # XXX spaces??\n\n if self.swigflags is None:\n self.swigflags = []\n else:\n self.swigflags = self.swigflags.split() # XXX spaces??\n return\n\n def run(self):\n if not (self.extensions or self.libraries):\n return\n if self.backends is not None:\n self.backend_split()\n 
self.build_sources()\n return\n\n def backend_split(self):\n log.info('splitting extensions for backends: %s' % (self.backends))\n extensions = []\n backends = self.backends.split(',')\n for ext in self.extensions:\n name = ext.name.split('.')[-1]\n fullname = self.get_ext_fullname(ext.name)\n def func(extension, src_dir):\n source = os.path.join(os.path.dirname(src_dir),name+'.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.write(_split_ext_template \\\n % {'name':name,'fullname':fullname,\n 'backends':backends})\n f.close()\n return [ source ]\n def func_init(extension, src_dir):\n source = os.path.join(src_dir,'__init__.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.close()\n return [source]\n for b in backends:\n new_ext = self.split_extension(ext,b)\n new_ext.sources.append(func_init)\n extensions.append(new_ext)\n\n new_package = dot_join(*(ext.name.split('.')[:-1]+['_'+b]))\n new_package_dir = os.path.join(*([self.build_src]+ext.name.split('.')[:-1]+['_'+b]))\n if new_package not in self.distribution.packages:\n self.distribution.packages.append(new_package)\n self.distribution.package_dir[new_package] = new_package_dir\n\n ext.sources = [func]\n extensions.append(ext)\n self.extensions[:] = extensions\n\n def split_extension(self, ext, backend):\n fullname = self.get_ext_fullname(ext.name)\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n name = modpath[-1]\n macros = []\n macros.append((backend.upper(),None))\n new_ext = Extension(name = dot_join(package,'_%s.%s' % (backend,name)),\n sources = ext.sources,\n include_dirs = ext.include_dirs,\n define_macros = ext.define_macros + macros,\n undef_macros = ext.undef_macros,\n library_dirs = ext.library_dirs,\n libraries = ext.libraries,\n runtime_library_dirs = ext.runtime_library_dirs,\n extra_objects = ext.extra_objects,\n extra_compile_args = ext.extra_compile_args,\n extra_link_args = ext.extra_link_args,\n export_symbols = ext.export_symbols,\n depends = 
ext.depends,\n language = ext.language,\n f2py_options = ext.f2py_options,\n module_dirs = ext.module_dirs\n )\n new_ext.backend = backend\n return new_ext\n \n def build_sources(self):\n self.check_extensions_list(self.extensions)\n\n for ext in self.extensions:\n self.build_extension_sources(ext)\n\n for libname_info in self.libraries:\n self.build_library_sources(*libname_info)\n\n return\n\n def build_library_sources(self, lib_name, build_info):\n sources = list(build_info.get('sources',[]))\n\n if not sources:\n return\n\n log.info('building library \"%s\" sources' % (lib_name))\n\n sources = self.generate_sources(sources, (lib_name, build_info))\n\n build_info['sources'] = sources\n return\n\n def build_extension_sources(self, ext):\n sources = list(ext.sources)\n\n log.info('building extension \"%s\" sources' % (ext.name))\n\n fullname = self.get_ext_fullname(ext.name)\n\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n\n if self.inplace:\n build_py = self.get_finalized_command('build_py')\n self.ext_target_dir = build_py.get_package_dir(package)\n\n sources = self.generate_sources(sources, ext)\n\n sources = self.template_sources(sources, ext)\n \n sources = self.swig_sources(sources, ext)\n\n sources = self.f2py_sources(sources, ext)\n\n sources, py_files = self.filter_py_files(sources)\n\n if not self.py_modules.has_key(package):\n self.py_modules[package] = []\n modules = []\n for f in py_files:\n module = os.path.splitext(os.path.basename(f))[0]\n modules.append((package, module, f))\n self.py_modules[package] += modules\n\n ext.sources = sources\n return\n\n def generate_sources(self, sources, extension):\n new_sources = []\n func_sources = []\n for source in sources:\n if type(source) is type(''):\n new_sources.append(source)\n else:\n func_sources.append(source)\n if not func_sources:\n return new_sources\n if self.inplace:\n build_dir = self.ext_target_dir\n else:\n if type(extension) is type(()):\n name = extension[0]\n else:\n 
name = extension.name\n build_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n self.mkpath(build_dir)\n for func in func_sources:\n source = func(extension, build_dir)\n if type(source) is type([]):\n [log.info(\" adding '%s' to sources.\" % (s)) for s in source]\n new_sources.extend(source)\n else:\n log.info(\" adding '%s' to sources.\" % (source))\n new_sources.append(source)\n return new_sources\n\n def filter_py_files(self, sources):\n new_sources = []\n py_files = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext=='.py': \n py_files.append(source)\n else:\n new_sources.append(source)\n return new_sources, py_files\n\n def template_sources(self, sources, extension):\n new_sources = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.src': # Template file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n self.mkpath(target_dir)\n target_file = os.path.join(target_dir,os.path.basename(base))\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file)):\n log.info(\"from_template:> %s\" % (target_file))\n outstr = process_file(source)\n fid = open(target_file,'w')\n fid.write(outstr)\n fid.close()\n new_sources.append(target_file)\n else:\n new_sources.append(source)\n return new_sources \n \n def f2py_sources(self, sources, extension):\n new_sources = []\n f2py_sources = []\n f_sources = []\n f2py_targets = {}\n target_dirs = []\n ext_name = extension.name.split('.')[-1]\n skip_f2py = 0\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.pyf': # F2PY interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n if os.path.isfile(source):\n name = get_f2py_modulename(source)\n assert name==ext_name,'mismatch of extension names: '\\\n +source+' provides'\\\n ' 
'+`name`+' but expected '+`ext_name`\n target_file = os.path.join(target_dir,name+'module.c')\n else:\n log.debug(' source %s does not exist: skipping f2py\\'ing.' \\\n % (source))\n name = ext_name\n skip_f2py = 1\n target_file = os.path.join(target_dir,name+'module.c')\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %smodule.c was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = os.path.join(target_dir,name+'module.c')\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' \\\n % (target_file))\n target_dirs.append(target_dir)\n f2py_sources.append(source)\n f2py_targets[source] = target_file\n new_sources.append(target_file)\n elif fortran_ext_match(ext):\n f_sources.append(source)\n else:\n new_sources.append(source)\n\n if not (f2py_sources or f_sources):\n return new_sources\n\n map(self.mkpath, target_dirs)\n\n f2py_options = extension.f2py_options + self.f2pyflags\n if f2py_sources:\n assert len(f2py_sources)==1,\\\n 'only one .pyf file is allowed per extension module but got'\\\n ' more:'+`f2py_sources`\n source = f2py_sources[0]\n target_file = f2py_targets[source]\n target_dir = os.path.dirname(target_file) or '.'\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file,'newer')) \\\n and not skip_f2py:\n log.info(\"f2py: %s\" % (source))\n import f2py2e\n f2py2e.run_main(f2py_options + ['--build-dir',target_dir,source])\n else:\n log.debug(\" skipping '%s' f2py interface (up-to-date)\" % (source))\n else:\n #XXX TODO: --inplace support for sdist command\n if type(extension) is type(()): name = extension[0]\n else: name = extension.name\n target_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n target_file = os.path.join(target_dir,ext_name + 'module.c')\n new_sources.append(target_file)\n depends = f_sources 
+ extension.depends\n if (self.force or newer_group(depends, target_file, 'newer')) \\\n and not skip_f2py:\n import f2py2e\n log.info(\"f2py:> %s\" % (target_file))\n self.mkpath(target_dir)\n f2py2e.run_main(f2py_options + ['--lower',\n '--build-dir',target_dir]+\\\n ['-m',ext_name]+f_sources)\n else:\n log.debug(\" skipping f2py fortran files for '%s' (up-to-date)\"\\\n % (target_file))\n\n assert os.path.isfile(target_file),`target_file`+' missing'\n\n target_c = os.path.join(self.build_src,'fortranobject.c')\n target_h = os.path.join(self.build_src,'fortranobject.h')\n log.info(\" adding '%s' to sources.\" % (target_c))\n new_sources.append(target_c)\n if self.build_src not in extension.include_dirs:\n log.info(\" adding '%s' to include_dirs.\" \\\n % (self.build_src))\n extension.include_dirs.append(self.build_src)\n\n if not skip_f2py:\n import f2py2e\n d = os.path.dirname(f2py2e.__file__)\n source_c = os.path.join(d,'src','fortranobject.c')\n source_h = os.path.join(d,'src','fortranobject.h')\n if newer(source_c,target_c) or newer(source_h,target_h):\n self.mkpath(os.path.dirname(target_c))\n self.copy_file(source_c,target_c)\n self.copy_file(source_h,target_h)\n else:\n assert os.path.isfile(target_c),`target_c` + ' missing'\n assert os.path.isfile(target_h),`target_h` + ' missing'\n \n for name_ext in ['-f2pywrappers.f','-f2pywrappers2.f90']:\n filename = os.path.join(target_dir,ext_name + name_ext)\n if os.path.isfile(filename):\n log.info(\" adding '%s' to sources.\" % (filename))\n f_sources.append(filename)\n\n return new_sources + f_sources\n\n def swig_sources(self, sources, extension):\n # Assuming SWIG 1.3.14 or later. 
See compatibility note in\n # http://www.swig.org/Doc1.3/Python.html#Python_nn6\n\n new_sources = []\n swig_sources = []\n swig_targets = {}\n target_dirs = []\n py_files = [] # swig generated .py files\n target_ext = '.c'\n typ = None\n is_cpp = 0\n skip_swig = 0\n ext_name = extension.name.split('.')[-1]\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.i': # SWIG interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n py_target_dir = self.ext_target_dir\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n py_target_dir = target_dir\n if os.path.isfile(source):\n name = get_swig_modulename(source)\n assert name==ext_name[1:],'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name[1:]`\n if typ is None:\n typ = get_swig_target(source)\n is_cpp = typ=='c++'\n if is_cpp:\n target_ext = '.cpp'\n else:\n assert typ == get_swig_target(source),`typ`\n target_file = os.path.join(target_dir,'%s_wrap%s' \\\n % (name, target_ext))\n else:\n log.debug(' source %s does not exist: skipping swig\\'ing.' \\\n % (source))\n name = ext_name[1:]\n skip_swig = 1\n target_file = _find_swig_target(target_dir, name)\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %s_wrap.{c,cpp} was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = _find_swig_target(target_dir, name)\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' 
\\\n % (target_file))\n target_dirs.append(target_dir)\n new_sources.append(target_file)\n py_files.append(os.path.join(py_target_dir, name+'.py'))\n swig_sources.append(source)\n swig_targets[source] = new_sources[-1]\n else:\n new_sources.append(source)\n\n if not swig_sources:\n return new_sources\n\n if skip_swig:\n return new_sources + py_files\n\n map(self.mkpath, target_dirs)\n swig = self.find_swig()\n swig_cmd = [swig, \"-python\"]\n if is_cpp:\n swig_cmd.append('-c++')\n for d in extension.include_dirs:\n swig_cmd.append('-I'+d)\n for source in swig_sources:\n target = swig_targets[source]\n depends = [source] + extension.depends\n if self.force or newer_group(depends, target, 'newer'):\n log.info(\"%s: %s\" % (os.path.basename(swig) \\\n + (is_cpp and '++' or ''), source))\n self.spawn(swig_cmd + self.swigflags \\\n + [\"-o\", target, '-outdir', py_target_dir, source])\n else:\n log.debug(\" skipping '%s' swig interface (up-to-date)\" \\\n % (source))\n\n return new_sources + py_files\n\ndef appendpath(prefix,path):\n if os.path.isabs(path):\n absprefix = os.path.abspath(prefix)\n d = os.path.commonprefix([absprefix,path])\n subpath = path[len(d):]\n assert not os.path.isabs(subpath),`subpath`\n return os.path.normpath(os.path.join(prefix,subpath))\n return os.path.normpath(os.path.join(prefix, path))\n\n#### SWIG related auxiliary functions ####\n_swig_module_name_match = re.compile(r'\\s*%module\\s*(?P[\\w_]+)',\n re.I).match\n_has_c_header = re.compile(r'-[*]-\\s*c\\s*-[*]-',re.I).search\n_has_cpp_header = re.compile(r'-[*]-\\s*c[+][+]\\s*-[*]-',re.I).search\n\ndef get_swig_target(source):\n f = open(source,'r')\n result = 'c'\n line = f.readline()\n if _has_cpp_header(line):\n result = 'c++'\n if _has_c_header(line):\n result = 'c'\n f.close()\n return result\n\ndef get_swig_modulename(source):\n f = open(source,'r')\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _swig_module_name_match(line)\n if m:\n name = 
m.group('name')\n break\n f.close()\n return name\n\ndef _find_swig_target(target_dir,name):\n for ext in ['.cpp','.c']:\n target = os.path.join(target_dir,'%s_wrap%s' % (name, ext))\n if os.path.isfile(target):\n break\n return target\n\n#### F2PY related auxiliary functions ####\n\n_f2py_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]+)',\n re.I).match\n_f2py_user_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]*?'\\\n '__user__[\\w_]*)',re.I).match\n\ndef get_f2py_modulename(source):\n name = None\n f = open(source)\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _f2py_module_name_match(line)\n if m:\n if _f2py_user_module_name_match(line): # skip *__user__* names\n continue\n name = m.group('name')\n break\n f.close()\n return name\n\n##########################################\n", "source_code_before": "\"\"\" Build swig, f2py, weave, sources.\n\"\"\"\n\nimport os\nimport re\n\nfrom distutils.cmd import Command\nfrom distutils.command import build_ext, build_py\nfrom distutils.util import convert_path\nfrom distutils.dep_util import newer_group, newer\n\nfrom scipy_distutils import log\nfrom scipy_distutils.misc_util import fortran_ext_match, all_strings\nfrom scipy_distutils.from_template import process_file\n\nclass build_src(build_ext.build_ext):\n\n description = \"build sources from SWIG, F2PY files or a function\"\n\n user_options = [\n ('build-src=', 'd', \"directory to \\\"build\\\" sources to\"),\n ('f2pyflags=', None, \"additonal flags to f2py\"),\n ('swigflags=', None, \"additional flags to swig\"),\n ('force', 'f', \"forcibly build everything (ignore file timestamps)\"),\n ('inplace', 'i',\n \"ignore build-lib and put compiled extensions into the source \" +\n \"directory alongside your pure Python modules\"),\n ]\n\n boolean_options = ['force','inplace']\n\n help_options = []\n\n def initialize_options(self):\n self.extensions = None\n self.package = None\n self.py_modules 
= None\n self.build_src = None\n self.build_lib = None\n self.build_base = None\n self.force = None\n self.inplace = None\n self.package_dir = None\n self.f2pyflags = None\n self.swigflags = None\n return\n\n def finalize_options(self):\n self.set_undefined_options('build',\n ('build_base', 'build_base'),\n ('build_lib', 'build_lib'),\n ('force', 'force'))\n if self.package is None:\n self.package = self.distribution.ext_package\n self.extensions = self.distribution.ext_modules\n self.libraries = self.distribution.libraries or []\n self.py_modules = self.distribution.py_modules\n if self.build_src is None:\n self.build_src = os.path.join(self.build_base, 'src')\n if self.inplace is None:\n build_ext = self.get_finalized_command('build_ext')\n self.inplace = build_ext.inplace\n\n # py_modules is used in build_py.find_package_modules\n self.py_modules = {}\n\n if self.f2pyflags is None:\n self.f2pyflags = []\n else:\n self.f2pyflags = self.f2pyflags.split() # XXX spaces??\n\n if self.swigflags is None:\n self.swigflags = []\n else:\n self.swigflags = self.swigflags.split() # XXX spaces??\n return\n\n def run(self):\n if not (self.extensions or self.libraries):\n return\n self.build_sources()\n return\n\n def build_sources(self):\n self.check_extensions_list(self.extensions)\n\n for ext in self.extensions:\n self.build_extension_sources(ext)\n\n for libname_info in self.libraries:\n self.build_library_sources(*libname_info)\n\n return\n\n def build_library_sources(self, lib_name, build_info):\n sources = list(build_info.get('sources',[]))\n\n if not sources:\n return\n\n log.info('building library \"%s\" sources' % (lib_name))\n\n sources = self.generate_sources(sources, (lib_name, build_info))\n\n build_info['sources'] = sources\n return\n\n def build_extension_sources(self, ext):\n sources = list(ext.sources)\n\n log.info('building extension \"%s\" sources' % (ext.name))\n\n fullname = self.get_ext_fullname(ext.name)\n\n modpath = fullname.split('.')\n package = 
'.'.join(modpath[0:-1])\n\n if self.inplace:\n build_py = self.get_finalized_command('build_py')\n self.ext_target_dir = build_py.get_package_dir(package)\n\n sources = self.generate_sources(sources, ext)\n\n sources = self.template_sources(sources, ext)\n \n sources = self.swig_sources(sources, ext)\n\n sources = self.f2py_sources(sources, ext)\n\n sources, py_files = self.filter_py_files(sources)\n\n if not self.py_modules.has_key(package):\n self.py_modules[package] = []\n modules = []\n for f in py_files:\n module = os.path.splitext(os.path.basename(f))[0]\n modules.append((package, module, f))\n self.py_modules[package] += modules\n\n ext.sources = sources\n return\n\n def generate_sources(self, sources, extension):\n new_sources = []\n func_sources = []\n for source in sources:\n if type(source) is type(''):\n new_sources.append(source)\n else:\n func_sources.append(source)\n if not func_sources:\n return new_sources\n if self.inplace:\n build_dir = self.ext_target_dir\n else:\n if type(extension) is type(()):\n name = extension[0]\n else:\n name = extension.name\n build_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n self.mkpath(build_dir)\n for func in func_sources:\n source = func(extension, build_dir)\n if type(source) is type([]):\n [log.info(\" adding '%s' to sources.\" % (s)) for s in source]\n new_sources.extend(source)\n else:\n log.info(\" adding '%s' to sources.\" % (source))\n new_sources.append(source)\n return new_sources\n\n def filter_py_files(self, sources):\n new_sources = []\n py_files = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext=='.py': \n py_files.append(source)\n else:\n new_sources.append(source)\n return new_sources, py_files\n\n def template_sources(self, sources, extension):\n new_sources = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.src': # Template file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = 
appendpath(self.build_src, os.path.dirname(base))\n self.mkpath(target_dir)\n target_file = os.path.join(target_dir,os.path.basename(base))\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file)):\n log.info(\"from_template:> %s\" % (target_file))\n outstr = process_file(source)\n fid = open(target_file,'w')\n fid.write(outstr)\n fid.close()\n new_sources.append(target_file)\n else:\n new_sources.append(source)\n return new_sources \n \n def f2py_sources(self, sources, extension):\n new_sources = []\n f2py_sources = []\n f_sources = []\n f2py_targets = {}\n target_dirs = []\n ext_name = extension.name.split('.')[-1]\n skip_f2py = 0\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.pyf': # F2PY interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n if os.path.isfile(source):\n name = get_f2py_modulename(source)\n assert name==ext_name,'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name`\n target_file = os.path.join(target_dir,name+'module.c')\n else:\n log.debug(' source %s does not exist: skipping f2py\\'ing.' \\\n % (source))\n name = ext_name\n skip_f2py = 1\n target_file = os.path.join(target_dir,name+'module.c')\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %smodule.c was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = os.path.join(target_dir,name+'module.c')\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' 
\\\n % (target_file))\n target_dirs.append(target_dir)\n f2py_sources.append(source)\n f2py_targets[source] = target_file\n new_sources.append(target_file)\n elif fortran_ext_match(ext):\n f_sources.append(source)\n else:\n new_sources.append(source)\n\n if not (f2py_sources or f_sources):\n return new_sources\n\n map(self.mkpath, target_dirs)\n\n f2py_options = extension.f2py_options + self.f2pyflags\n if f2py_sources:\n assert len(f2py_sources)==1,\\\n 'only one .pyf file is allowed per extension module but got'\\\n ' more:'+`f2py_sources`\n source = f2py_sources[0]\n target_file = f2py_targets[source]\n target_dir = os.path.dirname(target_file) or '.'\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file,'newer')) \\\n and not skip_f2py:\n log.info(\"f2py: %s\" % (source))\n import f2py2e\n f2py2e.run_main(f2py_options + ['--build-dir',target_dir,source])\n else:\n log.debug(\" skipping '%s' f2py interface (up-to-date)\" % (source))\n else:\n #XXX TODO: --inplace support for sdist command\n if type(extension) is type(()): name = extension[0]\n else: name = extension.name\n target_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n target_file = os.path.join(target_dir,ext_name + 'module.c')\n new_sources.append(target_file)\n depends = f_sources + extension.depends\n if (self.force or newer_group(depends, target_file, 'newer')) \\\n and not skip_f2py:\n import f2py2e\n log.info(\"f2py:> %s\" % (target_file))\n self.mkpath(target_dir)\n f2py2e.run_main(f2py_options + ['--lower',\n '--build-dir',target_dir]+\\\n ['-m',ext_name]+f_sources)\n else:\n log.debug(\" skipping f2py fortran files for '%s' (up-to-date)\"\\\n % (target_file))\n\n assert os.path.isfile(target_file),`target_file`+' missing'\n\n target_c = os.path.join(self.build_src,'fortranobject.c')\n target_h = os.path.join(self.build_src,'fortranobject.h')\n log.info(\" adding '%s' to sources.\" % (target_c))\n new_sources.append(target_c)\n if 
self.build_src not in extension.include_dirs:\n log.info(\" adding '%s' to include_dirs.\" \\\n % (self.build_src))\n extension.include_dirs.append(self.build_src)\n\n if not skip_f2py:\n import f2py2e\n d = os.path.dirname(f2py2e.__file__)\n source_c = os.path.join(d,'src','fortranobject.c')\n source_h = os.path.join(d,'src','fortranobject.h')\n if newer(source_c,target_c) or newer(source_h,target_h):\n self.mkpath(os.path.dirname(target_c))\n self.copy_file(source_c,target_c)\n self.copy_file(source_h,target_h)\n else:\n assert os.path.isfile(target_c),`target_c` + ' missing'\n assert os.path.isfile(target_h),`target_h` + ' missing'\n \n for name_ext in ['-f2pywrappers.f','-f2pywrappers2.f90']:\n filename = os.path.join(target_dir,ext_name + name_ext)\n if os.path.isfile(filename):\n log.info(\" adding '%s' to sources.\" % (filename))\n f_sources.append(filename)\n\n return new_sources + f_sources\n\n def swig_sources(self, sources, extension):\n # Assuming SWIG 1.3.14 or later. See compatibility note in\n # http://www.swig.org/Doc1.3/Python.html#Python_nn6\n\n new_sources = []\n swig_sources = []\n swig_targets = {}\n target_dirs = []\n py_files = [] # swig generated .py files\n target_ext = '.c'\n typ = None\n is_cpp = 0\n skip_swig = 0\n ext_name = extension.name.split('.')[-1]\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.i': # SWIG interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n py_target_dir = self.ext_target_dir\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n py_target_dir = target_dir\n if os.path.isfile(source):\n name = get_swig_modulename(source)\n assert name==ext_name[1:],'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name[1:]`\n if typ is None:\n typ = get_swig_target(source)\n is_cpp = typ=='c++'\n if is_cpp:\n target_ext = '.cpp'\n else:\n assert typ == get_swig_target(source),`typ`\n target_file = 
os.path.join(target_dir,'%s_wrap%s' \\\n % (name, target_ext))\n else:\n log.debug(' source %s does not exist: skipping swig\\'ing.' \\\n % (source))\n name = ext_name[1:]\n skip_swig = 1\n target_file = _find_swig_target(target_dir, name)\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %s_wrap.{c,cpp} was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = _find_swig_target(target_dir, name)\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' \\\n % (target_file))\n target_dirs.append(target_dir)\n new_sources.append(target_file)\n py_files.append(os.path.join(py_target_dir, name+'.py'))\n swig_sources.append(source)\n swig_targets[source] = new_sources[-1]\n else:\n new_sources.append(source)\n\n if not swig_sources:\n return new_sources\n\n if skip_swig:\n return new_sources + py_files\n\n map(self.mkpath, target_dirs)\n swig = self.find_swig()\n swig_cmd = [swig, \"-python\"]\n if is_cpp:\n swig_cmd.append('-c++')\n for d in extension.include_dirs:\n swig_cmd.append('-I'+d)\n for source in swig_sources:\n target = swig_targets[source]\n depends = [source] + extension.depends\n if self.force or newer_group(depends, target, 'newer'):\n log.info(\"%s: %s\" % (os.path.basename(swig) \\\n + (is_cpp and '++' or ''), source))\n self.spawn(swig_cmd + self.swigflags \\\n + [\"-o\", target, '-outdir', py_target_dir, source])\n else:\n log.debug(\" skipping '%s' swig interface (up-to-date)\" \\\n % (source))\n\n return new_sources + py_files\n\ndef appendpath(prefix,path):\n if os.path.isabs(path):\n absprefix = os.path.abspath(prefix)\n d = os.path.commonprefix([absprefix,path])\n subpath = path[len(d):]\n assert not os.path.isabs(subpath),`subpath`\n return os.path.normpath(os.path.join(prefix,subpath))\n return os.path.normpath(os.path.join(prefix, path))\n\n#### SWIG related 
auxiliary functions ####\n_swig_module_name_match = re.compile(r'\\s*%module\\s*(?P[\\w_]+)',\n re.I).match\n_has_c_header = re.compile(r'-[*]-\\s*c\\s*-[*]-',re.I).search\n_has_cpp_header = re.compile(r'-[*]-\\s*c[+][+]\\s*-[*]-',re.I).search\n\ndef get_swig_target(source):\n f = open(source,'r')\n result = 'c'\n line = f.readline()\n if _has_cpp_header(line):\n result = 'c++'\n if _has_c_header(line):\n result = 'c'\n f.close()\n return result\n\ndef get_swig_modulename(source):\n f = open(source,'r')\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _swig_module_name_match(line)\n if m:\n name = m.group('name')\n break\n f.close()\n return name\n\ndef _find_swig_target(target_dir,name):\n for ext in ['.cpp','.c']:\n target = os.path.join(target_dir,'%s_wrap%s' % (name, ext))\n if os.path.isfile(target):\n break\n return target\n\n#### F2PY related auxiliary functions ####\n\n_f2py_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]+)',\n re.I).match\n_f2py_user_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]*?'\\\n '__user__[\\w_]*)',re.I).match\n\ndef get_f2py_modulename(source):\n name = None\n f = open(source)\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _f2py_module_name_match(line)\n if m:\n if _f2py_user_module_name_match(line): # skip *__user__* names\n continue\n name = m.group('name')\n break\n f.close()\n return name\n\n##########################################\n", "methods": [ { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_src.py", "nloc": 14, "complexity": 1, "token_count": 66, "parameters": [ "self" ], "start_line": 66, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_src.py", "nloc": 28, "complexity": 8, "token_count": 201, "parameters": [ "self" 
], "start_line": 81, "end_line": 112, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_src.py", "nloc": 7, "complexity": 4, "token_count": 37, "parameters": [ "self" ], "start_line": 114, "end_line": 120, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "backend_split.func", "long_name": "backend_split.func( extension , src_dir )", "filename": "build_src.py", "nloc": 9, "complexity": 2, "token_count": 74, "parameters": [ "extension", "src_dir" ], "start_line": 129, "end_line": 137, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 3 }, { "name": "backend_split.func_init", "long_name": "backend_split.func_init( extension , src_dir )", "filename": "build_src.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "extension", "src_dir" ], "start_line": 138, "end_line": 143, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 3 }, { "name": "backend_split", "long_name": "backend_split( self )", "filename": "build_src.py", "nloc": 21, "complexity": 4, "token_count": 210, "parameters": [ "self" ], "start_line": 122, "end_line": 157, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "split_extension", "long_name": "split_extension( self , ext , backend )", "filename": "build_src.py", "nloc": 26, "complexity": 1, "token_count": 184, "parameters": [ "self", "ext", "backend" ], "start_line": 159, "end_line": 184, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "build_sources", "long_name": "build_sources( self )", "filename": "build_src.py", "nloc": 7, "complexity": 3, "token_count": 41, "parameters": [ "self" ], "start_line": 186, "end_line": 195, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": 
"build_library_sources", "long_name": "build_library_sources( self , lib_name , build_info )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 59, "parameters": [ "self", "lib_name", "build_info" ], "start_line": 197, "end_line": 208, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "build_extension_sources", "long_name": "build_extension_sources( self , ext )", "filename": "build_src.py", "nloc": 23, "complexity": 4, "token_count": 207, "parameters": [ "self", "ext" ], "start_line": 210, "end_line": 243, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 34, "top_nesting_level": 1 }, { "name": "generate_sources", "long_name": "generate_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 29, "complexity": 9, "token_count": 193, "parameters": [ "self", "sources", "extension" ], "start_line": 245, "end_line": 273, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "filter_py_files", "long_name": "filter_py_files( self , sources )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "self", "sources" ], "start_line": 275, "end_line": 284, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "template_sources", "long_name": "template_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 22, "complexity": 6, "token_count": 170, "parameters": [ "self", "sources", "extension" ], "start_line": 286, "end_line": 307, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "f2py_sources", "long_name": "f2py_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 110, "complexity": 24, "token_count": 874, "parameters": [ "self", "sources", "extension" ], "start_line": 309, "end_line": 428, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 120, 
"top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 80, "complexity": 17, "token_count": 539, "parameters": [ "self", "sources", "extension" ], "start_line": 430, "end_line": 517, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 88, "top_nesting_level": 1 }, { "name": "appendpath", "long_name": "appendpath( prefix , path )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 101, "parameters": [ "prefix", "path" ], "start_line": 519, "end_line": 526, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "get_swig_target", "long_name": "get_swig_target( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 48, "parameters": [ "source" ], "start_line": 534, "end_line": 543, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "get_swig_modulename", "long_name": "get_swig_modulename( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "source" ], "start_line": 545, "end_line": 554, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "_find_swig_target", "long_name": "_find_swig_target( target_dir , name )", "filename": "build_src.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "target_dir", "name" ], "start_line": 556, "end_line": 561, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "get_f2py_modulename", "long_name": "get_f2py_modulename( source )", "filename": "build_src.py", "nloc": 13, "complexity": 4, "token_count": 65, "parameters": [ "source" ], "start_line": 570, "end_line": 582, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 0 } ], "methods_before": [ { "name": "initialize_options", "long_name": "initialize_options( 
self )", "filename": "build_src.py", "nloc": 13, "complexity": 1, "token_count": 61, "parameters": [ "self" ], "start_line": 34, "end_line": 46, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_src.py", "nloc": 25, "complexity": 7, "token_count": 179, "parameters": [ "self" ], "start_line": 48, "end_line": 76, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_src.py", "nloc": 5, "complexity": 3, "token_count": 24, "parameters": [ "self" ], "start_line": 78, "end_line": 82, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "build_sources", "long_name": "build_sources( self )", "filename": "build_src.py", "nloc": 7, "complexity": 3, "token_count": 41, "parameters": [ "self" ], "start_line": 84, "end_line": 93, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "build_library_sources", "long_name": "build_library_sources( self , lib_name , build_info )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 59, "parameters": [ "self", "lib_name", "build_info" ], "start_line": 95, "end_line": 106, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "build_extension_sources", "long_name": "build_extension_sources( self , ext )", "filename": "build_src.py", "nloc": 23, "complexity": 4, "token_count": 207, "parameters": [ "self", "ext" ], "start_line": 108, "end_line": 141, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 34, "top_nesting_level": 1 }, { "name": "generate_sources", "long_name": "generate_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 29, "complexity": 9, "token_count": 193, "parameters": [ "self", "sources", "extension" ], 
"start_line": 143, "end_line": 171, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "filter_py_files", "long_name": "filter_py_files( self , sources )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "self", "sources" ], "start_line": 173, "end_line": 182, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "template_sources", "long_name": "template_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 22, "complexity": 6, "token_count": 170, "parameters": [ "self", "sources", "extension" ], "start_line": 184, "end_line": 205, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "f2py_sources", "long_name": "f2py_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 110, "complexity": 24, "token_count": 874, "parameters": [ "self", "sources", "extension" ], "start_line": 207, "end_line": 326, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 120, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 80, "complexity": 17, "token_count": 539, "parameters": [ "self", "sources", "extension" ], "start_line": 328, "end_line": 415, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 88, "top_nesting_level": 1 }, { "name": "appendpath", "long_name": "appendpath( prefix , path )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 101, "parameters": [ "prefix", "path" ], "start_line": 417, "end_line": 424, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "get_swig_target", "long_name": "get_swig_target( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 48, "parameters": [ "source" ], "start_line": 432, "end_line": 441, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "get_swig_modulename", "long_name": "get_swig_modulename( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "source" ], "start_line": 443, "end_line": 452, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "_find_swig_target", "long_name": "_find_swig_target( target_dir , name )", "filename": "build_src.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "target_dir", "name" ], "start_line": 454, "end_line": 459, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "get_f2py_modulename", "long_name": "get_f2py_modulename( source )", "filename": "build_src.py", "nloc": 13, "complexity": 4, "token_count": 65, "parameters": [ "source" ], "start_line": 468, "end_line": 480, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "backend_split", "long_name": "backend_split( self )", "filename": "build_src.py", "nloc": 21, "complexity": 4, "token_count": 210, "parameters": [ "self" ], "start_line": 122, "end_line": 157, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_src.py", "nloc": 14, "complexity": 1, "token_count": 66, "parameters": [ "self" ], "start_line": 66, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "backend_split.func", "long_name": "backend_split.func( extension , src_dir )", "filename": "build_src.py", "nloc": 9, "complexity": 2, "token_count": 74, "parameters": [ "extension", "src_dir" ], "start_line": 129, "end_line": 137, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 3 }, { "name": "split_extension", 
"long_name": "split_extension( self , ext , backend )", "filename": "build_src.py", "nloc": 26, "complexity": 1, "token_count": 184, "parameters": [ "self", "ext", "backend" ], "start_line": 159, "end_line": 184, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_src.py", "nloc": 7, "complexity": 4, "token_count": 37, "parameters": [ "self" ], "start_line": 114, "end_line": 120, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "backend_split.func_init", "long_name": "backend_split.func_init( extension , src_dir )", "filename": "build_src.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "extension", "src_dir" ], "start_line": 138, "end_line": 143, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 3 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_src.py", "nloc": 28, "complexity": 8, "token_count": 201, "parameters": [ "self" ], "start_line": 81, "end_line": 112, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 1 } ], "nloc": 507, "complexity": 105, "token_count": 3497, "diff_parsed": { "added": [ "from scipy_distutils.misc_util import fortran_ext_match, all_strings, dot_join", "from scipy_distutils.extension import Extension", "", "_split_ext_template = '''", "import os", "import sys", "", "_which = None, None", "_backends = %(backends)r", "if hasattr(sys, \"argv\"):", " i = -1", " for a in sys.argv:", " i += 1", " if a.lower()[2:] in _backends:", " _which = a.lower()[2:], \"command line\"", " del sys.argv[i]", " os.environ[_which[0].upper()] = _which[0]", " break", " del a", "", "if _which[0] is None:", " for b in _backends:", " if os.environ.get(b.upper(),None):", " _which = b, \"environment var\"", " break", " del b", "", "if _which[0] is None:", " _which = _backends[0], 
\"defaulted\"", "", "exec \"from _\" + _which[0] + \".%(name)s import *\"", "'''", "", " self.backends = None", " if self.backends is None:", " build_ext = self.get_finalized_command('build_ext')", " self.backends = build_ext.backends", " if self.backends is not None:", " self.backend_split()", " def backend_split(self):", " log.info('splitting extensions for backends: %s' % (self.backends))", " extensions = []", " backends = self.backends.split(',')", " for ext in self.extensions:", " name = ext.name.split('.')[-1]", " fullname = self.get_ext_fullname(ext.name)", " def func(extension, src_dir):", " source = os.path.join(os.path.dirname(src_dir),name+'.py')", " if newer(__file__, source):", " f = open(source,'w')", " f.write(_split_ext_template \\", " % {'name':name,'fullname':fullname,", " 'backends':backends})", " f.close()", " return [ source ]", " def func_init(extension, src_dir):", " source = os.path.join(src_dir,'__init__.py')", " if newer(__file__, source):", " f = open(source,'w')", " f.close()", " return [source]", " for b in backends:", " new_ext = self.split_extension(ext,b)", " new_ext.sources.append(func_init)", " extensions.append(new_ext)", "", " new_package = dot_join(*(ext.name.split('.')[:-1]+['_'+b]))", " new_package_dir = os.path.join(*([self.build_src]+ext.name.split('.')[:-1]+['_'+b]))", " if new_package not in self.distribution.packages:", " self.distribution.packages.append(new_package)", " self.distribution.package_dir[new_package] = new_package_dir", "", " ext.sources = [func]", " extensions.append(ext)", " self.extensions[:] = extensions", "", " def split_extension(self, ext, backend):", " fullname = self.get_ext_fullname(ext.name)", " modpath = fullname.split('.')", " package = '.'.join(modpath[0:-1])", " name = modpath[-1]", " macros = []", " macros.append((backend.upper(),None))", " new_ext = Extension(name = dot_join(package,'_%s.%s' % (backend,name)),", " sources = ext.sources,", " include_dirs = ext.include_dirs,", " define_macros 
= ext.define_macros + macros,", " undef_macros = ext.undef_macros,", " library_dirs = ext.library_dirs,", " libraries = ext.libraries,", " runtime_library_dirs = ext.runtime_library_dirs,", " extra_objects = ext.extra_objects,", " extra_compile_args = ext.extra_compile_args,", " extra_link_args = ext.extra_link_args,", " export_symbols = ext.export_symbols,", " depends = ext.depends,", " language = ext.language,", " f2py_options = ext.f2py_options,", " module_dirs = ext.module_dirs", " )", " new_ext.backend = backend", " return new_ext", "" ], "deleted": [ "from scipy_distutils.misc_util import fortran_ext_match, all_strings" ] } } ] }, { "hash": "b5ec49d17c699cc58510a9159a3e9832b06888d6", "msg": "OSX fix for g77", "author": { "name": "Robert Kern", "email": "robert.kern@gmail.com" }, "committer": { "name": "Robert Kern", "email": "robert.kern@gmail.com" }, "author_date": "2005-03-11T00:53:24+00:00", "author_timezone": 0, "committer_date": "2005-03-11T00:53:24+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "3ac42a6d78dda727f25dc44d08b9c1539b502203" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 4, "insertions": 23, "lines": 27, "files": 1, "dmm_unit_size": 0.0, "dmm_unit_complexity": 0.0, "dmm_unit_interfacing": 1.0, "modified_files": [ { "old_path": "scipy_distutils/gnufcompiler.py", "new_path": "scipy_distutils/gnufcompiler.py", "filename": "gnufcompiler.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -2,6 +2,7 @@\n import re\n import os\n import sys\n+import warnings\n \n from cpuinfo import cpu\n from fcompiler import FCompiler\n@@ -49,11 +50,27 @@ class GnuFCompiler(FCompiler):\n def get_flags_linker_so(self):\n opt = []\n if sys.platform=='darwin':\n- if os.path.realpath(sys.executable).startswith('/System'):\n+ try:\n+ import MacOS\n+ except ImportError:\n+ is_framework = False\n+ else:\n+ is_framework = (MacOS.linkmodel == 'framework')\n+ 
if is_framework:\n # This is when Python is from Apple framework\n- opt.extend([\"-Wl,-framework\",\"-Wl,Python\"])\n- #else we are running in Fink python.\n- opt.extend([\"-lcc_dynamic\",\"-bundle\"])\n+ target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)\n+ if target is None:\n+ target = '10.3'\n+ major, minor = target.split('.')\n+ if int(minor) < 3:\n+ minor = '3'\n+ warnings.warn('Environment variable ' \n+ 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')\n+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,\n+ minor)\n+ \n+ opt.extend(['-undefined', 'dynamic_lookup'])\n+ opt.append('-bundle')\n else:\n opt.append(\"-shared\")\n if sys.platform[:5]=='sunos':\n@@ -96,6 +113,8 @@ def get_libraries(self):\n opt.append('gcc')\n if g2c is not None:\n opt.append(g2c)\n+ if sys.platform == 'darwin':\n+ opt.append('cc_dynamic')\n return opt\n \n def get_flags_debug(self):\n", "added_lines": 23, "deleted_lines": 4, "source_code": "\nimport re\nimport os\nimport sys\nimport warnings\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler\nfrom exec_command import exec_command, find_executable\n\nclass GnuFCompiler(FCompiler):\n\n compiler_type = 'gnu'\n version_pattern = r'GNU Fortran ((\\(GCC[^\\)]*(\\)\\)|\\)))|)\\s*'\\\n '(?P[^\\s*\\)]+)'\n\n # 'g77 --version' results\n # SunOS: GNU Fortran (GCC 3.2) 3.2 20020814 (release)\n # Debian: GNU Fortran (GCC) 3.3.3 20040110 (prerelease) (Debian)\n # GNU Fortran (GCC) 3.3.3 (Debian 20040401)\n # GNU Fortran 0.5.25 20010319 (prerelease)\n # Redhat: GNU Fortran (GCC 3.2.2 20030222 (Red Hat Linux 3.2.2-5)) 3.2.2 20030222 (Red Hat Linux 3.2.2-5)\n\n for fc_exe in map(find_executable,['g77','f77']):\n if os.path.isfile(fc_exe):\n break\n executables = {\n 'version_cmd' : [fc_exe,\"--version\"],\n 'compiler_f77' : [fc_exe,\"-Wall\",\"-fno-second-underscore\"],\n 'compiler_f90' : None,\n 'compiler_fix' : None,\n 'linker_so' : [fc_exe],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"],\n }\n module_dir_switch = 
None\n module_include_switch = None\n\n # Cygwin: f771: warning: -fPIC ignored for target (all code is position independent)\n if os.name != 'nt' and sys.platform!='cygwin':\n pic_flags = ['-fPIC']\n\n #def get_linker_so(self):\n # # win32 linking should be handled by standard linker\n # # Darwin g77 cannot be used as a linker.\n # #if re.match(r'(darwin)', sys.platform):\n # # return\n # return FCompiler.get_linker_so(self)\n\n def get_flags_linker_so(self):\n opt = []\n if sys.platform=='darwin':\n try:\n import MacOS\n except ImportError:\n is_framework = False\n else:\n is_framework = (MacOS.linkmodel == 'framework')\n if is_framework:\n # This is when Python is from Apple framework\n target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)\n if target is None:\n target = '10.3'\n major, minor = target.split('.')\n if int(minor) < 3:\n minor = '3'\n warnings.warn('Environment variable ' \n 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')\n os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,\n minor)\n \n opt.extend(['-undefined', 'dynamic_lookup'])\n opt.append('-bundle')\n else:\n opt.append(\"-shared\")\n if sys.platform[:5]=='sunos':\n # SunOS often has dynamically loaded symbols defined in the\n # static library libg2c.a The linker doesn't like this. To\n # ignore the problem, use the -mimpure-text flag. It isn't\n # the safest thing, but seems to work. 'man gcc' says:\n # \".. 
Instead of using -mimpure-text, you should compile all\n # source code with -fpic or -fPIC.\"\n opt.append('-mimpure-text')\n return opt\n\n def get_libgcc_dir(self):\n status, output = exec_command('%s -print-libgcc-file-name' \\\n % (self.compiler_f77[0]),use_tee=0) \n if not status:\n return os.path.dirname(output)\n return\n\n def get_library_dirs(self):\n opt = []\n if sys.platform[:5] != 'linux':\n d = self.get_libgcc_dir()\n if d:\n opt.append(d)\n return opt\n\n def get_libraries(self):\n opt = []\n d = self.get_libgcc_dir()\n if d is not None:\n g2c = 'g2c-pic'\n f = self.static_lib_format % (g2c, self.static_lib_extension)\n if not os.path.isfile(os.path.join(d,f)):\n g2c = 'g2c'\n else:\n g2c = 'g2c'\n \n if sys.platform=='win32':\n opt.append('gcc')\n if g2c is not None:\n opt.append(g2c)\n if sys.platform == 'darwin':\n opt.append('cc_dynamic')\n return opt\n\n def get_flags_debug(self):\n return ['-g']\n\n def get_flags_opt(self):\n if self.get_version()<='3.3.3':\n # With this compiler version building Fortran BLAS/LAPACK\n # with -O3 caused failures in lib.lapack heevr,syevr tests.\n opt = ['-O2']\n else:\n opt = ['-O3']\n opt.append('-funroll-loops')\n return opt\n\n def get_flags_arch(self):\n opt = []\n if sys.platform=='darwin':\n if os.name != 'posix':\n # this should presumably correspond to Apple\n if cpu.is_ppc():\n opt.append('-arch ppc')\n elif cpu.is_i386():\n opt.append('-arch i386')\n for a in '601 602 603 603e 604 604e 620 630 740 7400 7450 750'\\\n '403 505 801 821 823 860'.split():\n if getattr(cpu,'is_ppc%s'%a)():\n opt.append('-mcpu='+a)\n opt.append('-mtune='+a)\n break \n return opt\n march_flag = 1\n # 0.5.25 corresponds to 2.95.x\n if self.get_version() == '0.5.26': # gcc 3.0\n if cpu.is_AthlonK6():\n opt.append('-march=k6')\n elif cpu.is_AthlonK7():\n opt.append('-march=athlon')\n else:\n march_flag = 0\n # Note: gcc 3.2 on win32 has breakage with -march specified\n elif self.get_version() >= '3.1.1' \\\n and not 
sys.platform=='win32': # gcc >= 3.1.1\n if cpu.is_AthlonK6():\n opt.append('-march=k6')\n elif cpu.is_AthlonK6_2():\n opt.append('-march=k6-2')\n elif cpu.is_AthlonK6_3():\n opt.append('-march=k6-3')\n elif cpu.is_AthlonK7():\n opt.append('-march=athlon')\n elif cpu.is_AthlonMP():\n opt.append('-march=athlon-mp')\n # there's also: athlon-tbird, athlon-4, athlon-xp\n elif cpu.is_PentiumIV():\n opt.append('-march=pentium4')\n elif cpu.is_PentiumIII():\n opt.append('-march=pentium3')\n elif cpu.is_PentiumII():\n opt.append('-march=pentium2')\n else:\n march_flag = 0\n if self.get_version() >= '3.4' and not march_flag:\n march_flag = 1\n if cpu.is_Opteron():\n opt.append('-march=opteron')\n elif cpu.is_Athlon64():\n opt.append('-march=athlon64')\n else:\n march_flag = 0\n if cpu.has_mmx(): opt.append('-mmmx') \n if self.get_version() > '3.2.2':\n if cpu.has_sse2(): opt.append('-msse2')\n if cpu.has_sse(): opt.append('-msse')\n if self.get_version() >= '3.4':\n if cpu.has_sse3(): opt.append('-msse3')\n if cpu.has_3dnow(): opt.append('-m3dnow')\n else:\n march_flag = 0\n if march_flag:\n pass\n elif cpu.is_i686():\n opt.append('-march=i686')\n elif cpu.is_i586():\n opt.append('-march=i586')\n elif cpu.is_i486():\n opt.append('-march=i486')\n elif cpu.is_i386():\n opt.append('-march=i386')\n if cpu.is_Intel():\n opt.extend(['-malign-double','-fomit-frame-pointer'])\n return opt\n\nif __name__ == '__main__':\n from scipy_distutils import log\n log.set_verbosity(2)\n from fcompiler import new_fcompiler\n #compiler = new_fcompiler(compiler='gnu')\n compiler = GnuFCompiler()\n compiler.customize()\n print compiler.get_version()\n", "source_code_before": "\nimport re\nimport os\nimport sys\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler\nfrom exec_command import exec_command, find_executable\n\nclass GnuFCompiler(FCompiler):\n\n compiler_type = 'gnu'\n version_pattern = r'GNU Fortran ((\\(GCC[^\\)]*(\\)\\)|\\)))|)\\s*'\\\n '(?P[^\\s*\\)]+)'\n\n # 'g77 --version' 
results\n # SunOS: GNU Fortran (GCC 3.2) 3.2 20020814 (release)\n # Debian: GNU Fortran (GCC) 3.3.3 20040110 (prerelease) (Debian)\n # GNU Fortran (GCC) 3.3.3 (Debian 20040401)\n # GNU Fortran 0.5.25 20010319 (prerelease)\n # Redhat: GNU Fortran (GCC 3.2.2 20030222 (Red Hat Linux 3.2.2-5)) 3.2.2 20030222 (Red Hat Linux 3.2.2-5)\n\n for fc_exe in map(find_executable,['g77','f77']):\n if os.path.isfile(fc_exe):\n break\n executables = {\n 'version_cmd' : [fc_exe,\"--version\"],\n 'compiler_f77' : [fc_exe,\"-Wall\",\"-fno-second-underscore\"],\n 'compiler_f90' : None,\n 'compiler_fix' : None,\n 'linker_so' : [fc_exe],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"],\n }\n module_dir_switch = None\n module_include_switch = None\n\n # Cygwin: f771: warning: -fPIC ignored for target (all code is position independent)\n if os.name != 'nt' and sys.platform!='cygwin':\n pic_flags = ['-fPIC']\n\n #def get_linker_so(self):\n # # win32 linking should be handled by standard linker\n # # Darwin g77 cannot be used as a linker.\n # #if re.match(r'(darwin)', sys.platform):\n # # return\n # return FCompiler.get_linker_so(self)\n\n def get_flags_linker_so(self):\n opt = []\n if sys.platform=='darwin':\n if os.path.realpath(sys.executable).startswith('/System'):\n # This is when Python is from Apple framework\n opt.extend([\"-Wl,-framework\",\"-Wl,Python\"])\n #else we are running in Fink python.\n opt.extend([\"-lcc_dynamic\",\"-bundle\"])\n else:\n opt.append(\"-shared\")\n if sys.platform[:5]=='sunos':\n # SunOS often has dynamically loaded symbols defined in the\n # static library libg2c.a The linker doesn't like this. To\n # ignore the problem, use the -mimpure-text flag. It isn't\n # the safest thing, but seems to work. 'man gcc' says:\n # \".. 
Instead of using -mimpure-text, you should compile all\n # source code with -fpic or -fPIC.\"\n opt.append('-mimpure-text')\n return opt\n\n def get_libgcc_dir(self):\n status, output = exec_command('%s -print-libgcc-file-name' \\\n % (self.compiler_f77[0]),use_tee=0) \n if not status:\n return os.path.dirname(output)\n return\n\n def get_library_dirs(self):\n opt = []\n if sys.platform[:5] != 'linux':\n d = self.get_libgcc_dir()\n if d:\n opt.append(d)\n return opt\n\n def get_libraries(self):\n opt = []\n d = self.get_libgcc_dir()\n if d is not None:\n g2c = 'g2c-pic'\n f = self.static_lib_format % (g2c, self.static_lib_extension)\n if not os.path.isfile(os.path.join(d,f)):\n g2c = 'g2c'\n else:\n g2c = 'g2c'\n \n if sys.platform=='win32':\n opt.append('gcc')\n if g2c is not None:\n opt.append(g2c)\n return opt\n\n def get_flags_debug(self):\n return ['-g']\n\n def get_flags_opt(self):\n if self.get_version()<='3.3.3':\n # With this compiler version building Fortran BLAS/LAPACK\n # with -O3 caused failures in lib.lapack heevr,syevr tests.\n opt = ['-O2']\n else:\n opt = ['-O3']\n opt.append('-funroll-loops')\n return opt\n\n def get_flags_arch(self):\n opt = []\n if sys.platform=='darwin':\n if os.name != 'posix':\n # this should presumably correspond to Apple\n if cpu.is_ppc():\n opt.append('-arch ppc')\n elif cpu.is_i386():\n opt.append('-arch i386')\n for a in '601 602 603 603e 604 604e 620 630 740 7400 7450 750'\\\n '403 505 801 821 823 860'.split():\n if getattr(cpu,'is_ppc%s'%a)():\n opt.append('-mcpu='+a)\n opt.append('-mtune='+a)\n break \n return opt\n march_flag = 1\n # 0.5.25 corresponds to 2.95.x\n if self.get_version() == '0.5.26': # gcc 3.0\n if cpu.is_AthlonK6():\n opt.append('-march=k6')\n elif cpu.is_AthlonK7():\n opt.append('-march=athlon')\n else:\n march_flag = 0\n # Note: gcc 3.2 on win32 has breakage with -march specified\n elif self.get_version() >= '3.1.1' \\\n and not sys.platform=='win32': # gcc >= 3.1.1\n if cpu.is_AthlonK6():\n 
opt.append('-march=k6')\n elif cpu.is_AthlonK6_2():\n opt.append('-march=k6-2')\n elif cpu.is_AthlonK6_3():\n opt.append('-march=k6-3')\n elif cpu.is_AthlonK7():\n opt.append('-march=athlon')\n elif cpu.is_AthlonMP():\n opt.append('-march=athlon-mp')\n # there's also: athlon-tbird, athlon-4, athlon-xp\n elif cpu.is_PentiumIV():\n opt.append('-march=pentium4')\n elif cpu.is_PentiumIII():\n opt.append('-march=pentium3')\n elif cpu.is_PentiumII():\n opt.append('-march=pentium2')\n else:\n march_flag = 0\n if self.get_version() >= '3.4' and not march_flag:\n march_flag = 1\n if cpu.is_Opteron():\n opt.append('-march=opteron')\n elif cpu.is_Athlon64():\n opt.append('-march=athlon64')\n else:\n march_flag = 0\n if cpu.has_mmx(): opt.append('-mmmx') \n if self.get_version() > '3.2.2':\n if cpu.has_sse2(): opt.append('-msse2')\n if cpu.has_sse(): opt.append('-msse')\n if self.get_version() >= '3.4':\n if cpu.has_sse3(): opt.append('-msse3')\n if cpu.has_3dnow(): opt.append('-m3dnow')\n else:\n march_flag = 0\n if march_flag:\n pass\n elif cpu.is_i686():\n opt.append('-march=i686')\n elif cpu.is_i586():\n opt.append('-march=i586')\n elif cpu.is_i486():\n opt.append('-march=i486')\n elif cpu.is_i386():\n opt.append('-march=i386')\n if cpu.is_Intel():\n opt.extend(['-malign-double','-fomit-frame-pointer'])\n return opt\n\nif __name__ == '__main__':\n from scipy_distutils import log\n log.set_verbosity(2)\n from fcompiler import new_fcompiler\n #compiler = new_fcompiler(compiler='gnu')\n compiler = GnuFCompiler()\n compiler.customize()\n print compiler.get_version()\n", "methods": [ { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "gnufcompiler.py", "nloc": 27, "complexity": 7, "token_count": 145, "parameters": [ "self" ], "start_line": 50, "end_line": 84, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 1 }, { "name": "get_libgcc_dir", "long_name": "get_libgcc_dir( self )", "filename": 
"gnufcompiler.py", "nloc": 6, "complexity": 2, "token_count": 41, "parameters": [ "self" ], "start_line": 86, "end_line": 91, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_library_dirs", "long_name": "get_library_dirs( self )", "filename": "gnufcompiler.py", "nloc": 7, "complexity": 3, "token_count": 38, "parameters": [ "self" ], "start_line": 93, "end_line": 99, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "gnufcompiler.py", "nloc": 17, "complexity": 6, "token_count": 106, "parameters": [ "self" ], "start_line": 101, "end_line": 118, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 18, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "gnufcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 120, "end_line": 121, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "gnufcompiler.py", "nloc": 7, "complexity": 2, "token_count": 34, "parameters": [ "self" ], "start_line": 123, "end_line": 131, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "get_flags_arch", "long_name": "get_flags_arch( self )", "filename": "gnufcompiler.py", "nloc": 73, "complexity": 37, "token_count": 469, "parameters": [ "self" ], "start_line": 133, "end_line": 209, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 77, "top_nesting_level": 1 } ], "methods_before": [ { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "gnufcompiler.py", "nloc": 11, "complexity": 4, "token_count": 80, "parameters": [ "self" ], "start_line": 49, "end_line": 67, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 19, 
"top_nesting_level": 1 }, { "name": "get_libgcc_dir", "long_name": "get_libgcc_dir( self )", "filename": "gnufcompiler.py", "nloc": 6, "complexity": 2, "token_count": 41, "parameters": [ "self" ], "start_line": 69, "end_line": 74, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_library_dirs", "long_name": "get_library_dirs( self )", "filename": "gnufcompiler.py", "nloc": 7, "complexity": 3, "token_count": 38, "parameters": [ "self" ], "start_line": 76, "end_line": 82, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "gnufcompiler.py", "nloc": 15, "complexity": 5, "token_count": 93, "parameters": [ "self" ], "start_line": 84, "end_line": 99, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "gnufcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 101, "end_line": 102, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "gnufcompiler.py", "nloc": 7, "complexity": 2, "token_count": 34, "parameters": [ "self" ], "start_line": 104, "end_line": 112, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "get_flags_arch", "long_name": "get_flags_arch( self )", "filename": "gnufcompiler.py", "nloc": 73, "complexity": 37, "token_count": 469, "parameters": [ "self" ], "start_line": 114, "end_line": 190, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 77, "top_nesting_level": 1 } ], "changed_methods": [ { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "gnufcompiler.py", "nloc": 17, "complexity": 6, "token_count": 106, "parameters": [ "self" ], 
"start_line": 101, "end_line": 118, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 18, "top_nesting_level": 1 }, { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "gnufcompiler.py", "nloc": 27, "complexity": 7, "token_count": 145, "parameters": [ "self" ], "start_line": 50, "end_line": 84, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 1 } ], "nloc": 173, "complexity": 58, "token_count": 1020, "diff_parsed": { "added": [ "import warnings", " try:", " import MacOS", " except ImportError:", " is_framework = False", " else:", " is_framework = (MacOS.linkmodel == 'framework')", " if is_framework:", " target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)", " if target is None:", " target = '10.3'", " major, minor = target.split('.')", " if int(minor) < 3:", " minor = '3'", " warnings.warn('Environment variable '", " 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')", " os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,", " minor)", "", " opt.extend(['-undefined', 'dynamic_lookup'])", " opt.append('-bundle')", " if sys.platform == 'darwin':", " opt.append('cc_dynamic')" ], "deleted": [ " if os.path.realpath(sys.executable).startswith('/System'):", " opt.extend([\"-Wl,-framework\",\"-Wl,Python\"])", " #else we are running in Fink python.", " opt.extend([\"-lcc_dynamic\",\"-bundle\"])" ] } } ] }, { "hash": "ef6dabc7a250e42f1fef7c7a684b1fbcbc221e01", "msg": "Firstly, fix FORTRAN fix for Fink.", "author": { "name": "Robert Kern", "email": "robert.kern@gmail.com" }, "committer": { "name": "Robert Kern", "email": "robert.kern@gmail.com" }, "author_date": "2005-03-12T01:48:10+00:00", "author_timezone": 0, "committer_date": "2005-03-12T01:48:10+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "b5ec49d17c699cc58510a9159a3e9832b06888d6" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 22, 
"insertions": 13, "lines": 35, "files": 1, "dmm_unit_size": 1.0, "dmm_unit_complexity": 1.0, "dmm_unit_interfacing": 0.0, "modified_files": [ { "old_path": "scipy_distutils/gnufcompiler.py", "new_path": "scipy_distutils/gnufcompiler.py", "filename": "gnufcompiler.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -29,7 +29,7 @@ class GnuFCompiler(FCompiler):\n 'compiler_f77' : [fc_exe,\"-Wall\",\"-fno-second-underscore\"],\n 'compiler_f90' : None,\n 'compiler_fix' : None,\n- 'linker_so' : [fc_exe],\n+ 'linker_so' : [fc_exe,\"-Wall\"],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"],\n }\n@@ -50,27 +50,18 @@ class GnuFCompiler(FCompiler):\n def get_flags_linker_so(self):\n opt = []\n if sys.platform=='darwin':\n- try:\n- import MacOS\n- except ImportError:\n- is_framework = False\n- else:\n- is_framework = (MacOS.linkmodel == 'framework')\n- if is_framework:\n- # This is when Python is from Apple framework\n- target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)\n- if target is None:\n- target = '10.3'\n- major, minor = target.split('.')\n- if int(minor) < 3:\n- minor = '3'\n- warnings.warn('Environment variable ' \n- 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')\n- os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,\n- minor)\n- \n- opt.extend(['-undefined', 'dynamic_lookup'])\n- opt.append('-bundle')\n+ target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)\n+ if target is None:\n+ target = '10.3'\n+ major, minor = target.split('.')\n+ if int(minor) < 3:\n+ minor = '3'\n+ warnings.warn('Environment variable ' \n+ 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')\n+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,\n+ minor)\n+ \n+ opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])\n else:\n opt.append(\"-shared\")\n if sys.platform[:5]=='sunos':\n", "added_lines": 13, "deleted_lines": 22, "source_code": "\nimport re\nimport os\nimport sys\nimport warnings\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler\nfrom 
exec_command import exec_command, find_executable\n\nclass GnuFCompiler(FCompiler):\n\n compiler_type = 'gnu'\n version_pattern = r'GNU Fortran ((\\(GCC[^\\)]*(\\)\\)|\\)))|)\\s*'\\\n '(?P[^\\s*\\)]+)'\n\n # 'g77 --version' results\n # SunOS: GNU Fortran (GCC 3.2) 3.2 20020814 (release)\n # Debian: GNU Fortran (GCC) 3.3.3 20040110 (prerelease) (Debian)\n # GNU Fortran (GCC) 3.3.3 (Debian 20040401)\n # GNU Fortran 0.5.25 20010319 (prerelease)\n # Redhat: GNU Fortran (GCC 3.2.2 20030222 (Red Hat Linux 3.2.2-5)) 3.2.2 20030222 (Red Hat Linux 3.2.2-5)\n\n for fc_exe in map(find_executable,['g77','f77']):\n if os.path.isfile(fc_exe):\n break\n executables = {\n 'version_cmd' : [fc_exe,\"--version\"],\n 'compiler_f77' : [fc_exe,\"-Wall\",\"-fno-second-underscore\"],\n 'compiler_f90' : None,\n 'compiler_fix' : None,\n 'linker_so' : [fc_exe,\"-Wall\"],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"],\n }\n module_dir_switch = None\n module_include_switch = None\n\n # Cygwin: f771: warning: -fPIC ignored for target (all code is position independent)\n if os.name != 'nt' and sys.platform!='cygwin':\n pic_flags = ['-fPIC']\n\n #def get_linker_so(self):\n # # win32 linking should be handled by standard linker\n # # Darwin g77 cannot be used as a linker.\n # #if re.match(r'(darwin)', sys.platform):\n # # return\n # return FCompiler.get_linker_so(self)\n\n def get_flags_linker_so(self):\n opt = []\n if sys.platform=='darwin':\n target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)\n if target is None:\n target = '10.3'\n major, minor = target.split('.')\n if int(minor) < 3:\n minor = '3'\n warnings.warn('Environment variable ' \n 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')\n os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,\n minor)\n \n opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])\n else:\n opt.append(\"-shared\")\n if sys.platform[:5]=='sunos':\n # SunOS often has dynamically loaded symbols defined in the\n # static library libg2c.a The linker 
doesn't like this. To\n # ignore the problem, use the -mimpure-text flag. It isn't\n # the safest thing, but seems to work. 'man gcc' says:\n # \".. Instead of using -mimpure-text, you should compile all\n # source code with -fpic or -fPIC.\"\n opt.append('-mimpure-text')\n return opt\n\n def get_libgcc_dir(self):\n status, output = exec_command('%s -print-libgcc-file-name' \\\n % (self.compiler_f77[0]),use_tee=0) \n if not status:\n return os.path.dirname(output)\n return\n\n def get_library_dirs(self):\n opt = []\n if sys.platform[:5] != 'linux':\n d = self.get_libgcc_dir()\n if d:\n opt.append(d)\n return opt\n\n def get_libraries(self):\n opt = []\n d = self.get_libgcc_dir()\n if d is not None:\n g2c = 'g2c-pic'\n f = self.static_lib_format % (g2c, self.static_lib_extension)\n if not os.path.isfile(os.path.join(d,f)):\n g2c = 'g2c'\n else:\n g2c = 'g2c'\n \n if sys.platform=='win32':\n opt.append('gcc')\n if g2c is not None:\n opt.append(g2c)\n if sys.platform == 'darwin':\n opt.append('cc_dynamic')\n return opt\n\n def get_flags_debug(self):\n return ['-g']\n\n def get_flags_opt(self):\n if self.get_version()<='3.3.3':\n # With this compiler version building Fortran BLAS/LAPACK\n # with -O3 caused failures in lib.lapack heevr,syevr tests.\n opt = ['-O2']\n else:\n opt = ['-O3']\n opt.append('-funroll-loops')\n return opt\n\n def get_flags_arch(self):\n opt = []\n if sys.platform=='darwin':\n if os.name != 'posix':\n # this should presumably correspond to Apple\n if cpu.is_ppc():\n opt.append('-arch ppc')\n elif cpu.is_i386():\n opt.append('-arch i386')\n for a in '601 602 603 603e 604 604e 620 630 740 7400 7450 750'\\\n '403 505 801 821 823 860'.split():\n if getattr(cpu,'is_ppc%s'%a)():\n opt.append('-mcpu='+a)\n opt.append('-mtune='+a)\n break \n return opt\n march_flag = 1\n # 0.5.25 corresponds to 2.95.x\n if self.get_version() == '0.5.26': # gcc 3.0\n if cpu.is_AthlonK6():\n opt.append('-march=k6')\n elif cpu.is_AthlonK7():\n opt.append('-march=athlon')\n 
else:\n march_flag = 0\n # Note: gcc 3.2 on win32 has breakage with -march specified\n elif self.get_version() >= '3.1.1' \\\n and not sys.platform=='win32': # gcc >= 3.1.1\n if cpu.is_AthlonK6():\n opt.append('-march=k6')\n elif cpu.is_AthlonK6_2():\n opt.append('-march=k6-2')\n elif cpu.is_AthlonK6_3():\n opt.append('-march=k6-3')\n elif cpu.is_AthlonK7():\n opt.append('-march=athlon')\n elif cpu.is_AthlonMP():\n opt.append('-march=athlon-mp')\n # there's also: athlon-tbird, athlon-4, athlon-xp\n elif cpu.is_PentiumIV():\n opt.append('-march=pentium4')\n elif cpu.is_PentiumIII():\n opt.append('-march=pentium3')\n elif cpu.is_PentiumII():\n opt.append('-march=pentium2')\n else:\n march_flag = 0\n if self.get_version() >= '3.4' and not march_flag:\n march_flag = 1\n if cpu.is_Opteron():\n opt.append('-march=opteron')\n elif cpu.is_Athlon64():\n opt.append('-march=athlon64')\n else:\n march_flag = 0\n if cpu.has_mmx(): opt.append('-mmmx') \n if self.get_version() > '3.2.2':\n if cpu.has_sse2(): opt.append('-msse2')\n if cpu.has_sse(): opt.append('-msse')\n if self.get_version() >= '3.4':\n if cpu.has_sse3(): opt.append('-msse3')\n if cpu.has_3dnow(): opt.append('-m3dnow')\n else:\n march_flag = 0\n if march_flag:\n pass\n elif cpu.is_i686():\n opt.append('-march=i686')\n elif cpu.is_i586():\n opt.append('-march=i586')\n elif cpu.is_i486():\n opt.append('-march=i486')\n elif cpu.is_i386():\n opt.append('-march=i386')\n if cpu.is_Intel():\n opt.extend(['-malign-double','-fomit-frame-pointer'])\n return opt\n\nif __name__ == '__main__':\n from scipy_distutils import log\n log.set_verbosity(2)\n from fcompiler import new_fcompiler\n #compiler = new_fcompiler(compiler='gnu')\n compiler = GnuFCompiler()\n compiler.customize()\n print compiler.get_version()\n", "source_code_before": "\nimport re\nimport os\nimport sys\nimport warnings\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler\nfrom exec_command import exec_command, find_executable\n\nclass 
GnuFCompiler(FCompiler):\n\n compiler_type = 'gnu'\n version_pattern = r'GNU Fortran ((\\(GCC[^\\)]*(\\)\\)|\\)))|)\\s*'\\\n '(?P[^\\s*\\)]+)'\n\n # 'g77 --version' results\n # SunOS: GNU Fortran (GCC 3.2) 3.2 20020814 (release)\n # Debian: GNU Fortran (GCC) 3.3.3 20040110 (prerelease) (Debian)\n # GNU Fortran (GCC) 3.3.3 (Debian 20040401)\n # GNU Fortran 0.5.25 20010319 (prerelease)\n # Redhat: GNU Fortran (GCC 3.2.2 20030222 (Red Hat Linux 3.2.2-5)) 3.2.2 20030222 (Red Hat Linux 3.2.2-5)\n\n for fc_exe in map(find_executable,['g77','f77']):\n if os.path.isfile(fc_exe):\n break\n executables = {\n 'version_cmd' : [fc_exe,\"--version\"],\n 'compiler_f77' : [fc_exe,\"-Wall\",\"-fno-second-underscore\"],\n 'compiler_f90' : None,\n 'compiler_fix' : None,\n 'linker_so' : [fc_exe],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"],\n }\n module_dir_switch = None\n module_include_switch = None\n\n # Cygwin: f771: warning: -fPIC ignored for target (all code is position independent)\n if os.name != 'nt' and sys.platform!='cygwin':\n pic_flags = ['-fPIC']\n\n #def get_linker_so(self):\n # # win32 linking should be handled by standard linker\n # # Darwin g77 cannot be used as a linker.\n # #if re.match(r'(darwin)', sys.platform):\n # # return\n # return FCompiler.get_linker_so(self)\n\n def get_flags_linker_so(self):\n opt = []\n if sys.platform=='darwin':\n try:\n import MacOS\n except ImportError:\n is_framework = False\n else:\n is_framework = (MacOS.linkmodel == 'framework')\n if is_framework:\n # This is when Python is from Apple framework\n target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)\n if target is None:\n target = '10.3'\n major, minor = target.split('.')\n if int(minor) < 3:\n minor = '3'\n warnings.warn('Environment variable ' \n 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')\n os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,\n minor)\n \n opt.extend(['-undefined', 'dynamic_lookup'])\n opt.append('-bundle')\n else:\n 
opt.append(\"-shared\")\n if sys.platform[:5]=='sunos':\n # SunOS often has dynamically loaded symbols defined in the\n # static library libg2c.a The linker doesn't like this. To\n # ignore the problem, use the -mimpure-text flag. It isn't\n # the safest thing, but seems to work. 'man gcc' says:\n # \".. Instead of using -mimpure-text, you should compile all\n # source code with -fpic or -fPIC.\"\n opt.append('-mimpure-text')\n return opt\n\n def get_libgcc_dir(self):\n status, output = exec_command('%s -print-libgcc-file-name' \\\n % (self.compiler_f77[0]),use_tee=0) \n if not status:\n return os.path.dirname(output)\n return\n\n def get_library_dirs(self):\n opt = []\n if sys.platform[:5] != 'linux':\n d = self.get_libgcc_dir()\n if d:\n opt.append(d)\n return opt\n\n def get_libraries(self):\n opt = []\n d = self.get_libgcc_dir()\n if d is not None:\n g2c = 'g2c-pic'\n f = self.static_lib_format % (g2c, self.static_lib_extension)\n if not os.path.isfile(os.path.join(d,f)):\n g2c = 'g2c'\n else:\n g2c = 'g2c'\n \n if sys.platform=='win32':\n opt.append('gcc')\n if g2c is not None:\n opt.append(g2c)\n if sys.platform == 'darwin':\n opt.append('cc_dynamic')\n return opt\n\n def get_flags_debug(self):\n return ['-g']\n\n def get_flags_opt(self):\n if self.get_version()<='3.3.3':\n # With this compiler version building Fortran BLAS/LAPACK\n # with -O3 caused failures in lib.lapack heevr,syevr tests.\n opt = ['-O2']\n else:\n opt = ['-O3']\n opt.append('-funroll-loops')\n return opt\n\n def get_flags_arch(self):\n opt = []\n if sys.platform=='darwin':\n if os.name != 'posix':\n # this should presumably correspond to Apple\n if cpu.is_ppc():\n opt.append('-arch ppc')\n elif cpu.is_i386():\n opt.append('-arch i386')\n for a in '601 602 603 603e 604 604e 620 630 740 7400 7450 750'\\\n '403 505 801 821 823 860'.split():\n if getattr(cpu,'is_ppc%s'%a)():\n opt.append('-mcpu='+a)\n opt.append('-mtune='+a)\n break \n return opt\n march_flag = 1\n # 0.5.25 corresponds to 
2.95.x\n if self.get_version() == '0.5.26': # gcc 3.0\n if cpu.is_AthlonK6():\n opt.append('-march=k6')\n elif cpu.is_AthlonK7():\n opt.append('-march=athlon')\n else:\n march_flag = 0\n # Note: gcc 3.2 on win32 has breakage with -march specified\n elif self.get_version() >= '3.1.1' \\\n and not sys.platform=='win32': # gcc >= 3.1.1\n if cpu.is_AthlonK6():\n opt.append('-march=k6')\n elif cpu.is_AthlonK6_2():\n opt.append('-march=k6-2')\n elif cpu.is_AthlonK6_3():\n opt.append('-march=k6-3')\n elif cpu.is_AthlonK7():\n opt.append('-march=athlon')\n elif cpu.is_AthlonMP():\n opt.append('-march=athlon-mp')\n # there's also: athlon-tbird, athlon-4, athlon-xp\n elif cpu.is_PentiumIV():\n opt.append('-march=pentium4')\n elif cpu.is_PentiumIII():\n opt.append('-march=pentium3')\n elif cpu.is_PentiumII():\n opt.append('-march=pentium2')\n else:\n march_flag = 0\n if self.get_version() >= '3.4' and not march_flag:\n march_flag = 1\n if cpu.is_Opteron():\n opt.append('-march=opteron')\n elif cpu.is_Athlon64():\n opt.append('-march=athlon64')\n else:\n march_flag = 0\n if cpu.has_mmx(): opt.append('-mmmx') \n if self.get_version() > '3.2.2':\n if cpu.has_sse2(): opt.append('-msse2')\n if cpu.has_sse(): opt.append('-msse')\n if self.get_version() >= '3.4':\n if cpu.has_sse3(): opt.append('-msse3')\n if cpu.has_3dnow(): opt.append('-m3dnow')\n else:\n march_flag = 0\n if march_flag:\n pass\n elif cpu.is_i686():\n opt.append('-march=i686')\n elif cpu.is_i586():\n opt.append('-march=i586')\n elif cpu.is_i486():\n opt.append('-march=i486')\n elif cpu.is_i386():\n opt.append('-march=i386')\n if cpu.is_Intel():\n opt.extend(['-malign-double','-fomit-frame-pointer'])\n return opt\n\nif __name__ == '__main__':\n from scipy_distutils import log\n log.set_verbosity(2)\n from fcompiler import new_fcompiler\n #compiler = new_fcompiler(compiler='gnu')\n compiler = GnuFCompiler()\n compiler.customize()\n print compiler.get_version()\n", "methods": [ { "name": "get_flags_linker_so", 
"long_name": "get_flags_linker_so( self )", "filename": "gnufcompiler.py", "nloc": 19, "complexity": 5, "token_count": 117, "parameters": [ "self" ], "start_line": 50, "end_line": 75, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "get_libgcc_dir", "long_name": "get_libgcc_dir( self )", "filename": "gnufcompiler.py", "nloc": 6, "complexity": 2, "token_count": 41, "parameters": [ "self" ], "start_line": 77, "end_line": 82, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_library_dirs", "long_name": "get_library_dirs( self )", "filename": "gnufcompiler.py", "nloc": 7, "complexity": 3, "token_count": 38, "parameters": [ "self" ], "start_line": 84, "end_line": 90, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "gnufcompiler.py", "nloc": 17, "complexity": 6, "token_count": 106, "parameters": [ "self" ], "start_line": 92, "end_line": 109, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 18, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "gnufcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 111, "end_line": 112, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "gnufcompiler.py", "nloc": 7, "complexity": 2, "token_count": 34, "parameters": [ "self" ], "start_line": 114, "end_line": 122, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "get_flags_arch", "long_name": "get_flags_arch( self )", "filename": "gnufcompiler.py", "nloc": 73, "complexity": 37, "token_count": 469, "parameters": [ "self" ], "start_line": 124, "end_line": 200, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 77, "top_nesting_level": 1 } ], "methods_before": [ { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "gnufcompiler.py", "nloc": 27, "complexity": 7, "token_count": 145, "parameters": [ "self" ], "start_line": 50, "end_line": 84, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 1 }, { "name": "get_libgcc_dir", "long_name": "get_libgcc_dir( self )", "filename": "gnufcompiler.py", "nloc": 6, "complexity": 2, "token_count": 41, "parameters": [ "self" ], "start_line": 86, "end_line": 91, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "get_library_dirs", "long_name": "get_library_dirs( self )", "filename": "gnufcompiler.py", "nloc": 7, "complexity": 3, "token_count": 38, "parameters": [ "self" ], "start_line": 93, "end_line": 99, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "gnufcompiler.py", "nloc": 17, "complexity": 6, "token_count": 106, "parameters": [ "self" ], "start_line": 101, "end_line": 118, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 18, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "gnufcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 120, "end_line": 121, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "gnufcompiler.py", "nloc": 7, "complexity": 2, "token_count": 34, "parameters": [ "self" ], "start_line": 123, "end_line": 131, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "get_flags_arch", "long_name": "get_flags_arch( self )", "filename": "gnufcompiler.py", "nloc": 73, "complexity": 37, 
"token_count": 469, "parameters": [ "self" ], "start_line": 133, "end_line": 209, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 77, "top_nesting_level": 1 } ], "changed_methods": [ { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "gnufcompiler.py", "nloc": 19, "complexity": 5, "token_count": 117, "parameters": [ "self" ], "start_line": 50, "end_line": 75, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 } ], "nloc": 165, "complexity": 56, "token_count": 994, "diff_parsed": { "added": [ " 'linker_so' : [fc_exe,\"-Wall\"],", " target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)", " if target is None:", " target = '10.3'", " major, minor = target.split('.')", " if int(minor) < 3:", " minor = '3'", " warnings.warn('Environment variable '", " 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')", " os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,", " minor)", "", " opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])" ], "deleted": [ " 'linker_so' : [fc_exe],", " try:", " import MacOS", " except ImportError:", " is_framework = False", " else:", " is_framework = (MacOS.linkmodel == 'framework')", " if is_framework:", " # This is when Python is from Apple framework", " target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)", " if target is None:", " target = '10.3'", " major, minor = target.split('.')", " if int(minor) < 3:", " minor = '3'", " warnings.warn('Environment variable '", " 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')", " os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,", " minor)", "", " opt.extend(['-undefined', 'dynamic_lookup'])", " opt.append('-bundle')" ] } } ] }, { "hash": "d851d5a9e8d6689ee28e8be873b30d631cff7039", "msg": "Fixed the detection of __file__ existence.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": 
"2005-03-13T23:16:34+00:00", "author_timezone": 0, "committer_date": "2005-03-13T23:16:34+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "ef6dabc7a250e42f1fef7c7a684b1fbcbc221e01" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 4, "insertions": 4, "lines": 8, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "scipy_distutils/system_info.py", "new_path": "scipy_distutils/system_info.py", "filename": "system_info.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -283,10 +283,10 @@ def __init__ (self,\n defaults['search_static_first'] = str(self.search_static_first)\n self.cp = ConfigParser.ConfigParser(defaults)\n try:\n- __file__\n- except NameError:\n- __file__ = sys.argv[0]\n- cf = os.path.join(os.path.split(os.path.abspath(__file__))[0],\n+ f = __file__\n+ except NameError,msg:\n+ f = sys.argv[0]\n+ cf = os.path.join(os.path.split(os.path.abspath(f))[0],\n 'site.cfg')\n self.cp.read([cf])\n if not self.cp.has_section(self.section):\n", "added_lines": 4, "deleted_lines": 4, "source_code": "#!/usr/bin/env python\n\"\"\"\nThis file defines a set of system_info classes for getting\ninformation about various resources (libraries, library directories,\ninclude directories, etc.) in the system. 
Currently, the following\nclasses are available:\n\n atlas_info\n atlas_threads_info\n atlas_blas_info\n atlas_blas_threads_info\n lapack_atlas_info\n blas_info\n lapack_info\n blas_opt_info # usage recommended\n lapack_opt_info # usage recommended\n fftw_info,dfftw_info,sfftw_info\n fftw_threads_info,dfftw_threads_info,sfftw_threads_info\n djbfft_info\n x11_info\n lapack_src_info\n blas_src_info\n numpy_info\n numarray_info\n boost_python_info\n agg2_info\n wx_info\n gdk_pixbuf_xlib_2_info\n gdk_pixbuf_2_info\n gdk_x11_2_info\n gtkp_x11_2_info\n gtkp_2_info\n xft_info\n freetype2_info\n\nUsage:\n info_dict = get_info()\n where is a string 'atlas','x11','fftw','lapack','blas',\n 'lapack_src', 'blas_src', etc. For a complete list of allowed names,\n see the definition of get_info() function below.\n\n Returned info_dict is a dictionary which is compatible with\n distutils.setup keyword arguments. If info_dict == {}, then the\n asked resource is not available (system_info could not find it).\n\n Several *_info classes specify an environment variable to specify\n the locations of software. When setting the corresponding environment\n variable to 'None' then the software will be ignored, even when it\n is available in system.\n\nGlobal parameters:\n system_info.search_static_first - search static libraries (.a)\n in precedence to shared ones (.so, .sl) if enabled.\n system_info.verbosity - output the results to stdout if enabled.\n\nThe file 'site.cfg' in the same directory as this module is read\nfor configuration options. The format is that used by ConfigParser (i.e.,\nWindows .INI style). The section DEFAULT has options that are the default\nfor each section. The available sections are fftw, atlas, and x11. Appropiate\ndefaults are used if nothing is specified.\n\nThe order of finding the locations of resources is the following:\n 1. environment variable\n 2. section in site.cfg\n 3. 
DEFAULT section in site.cfg\nOnly the first complete match is returned.\n\nExample:\n----------\n[DEFAULT]\nlibrary_dirs = /usr/lib:/usr/local/lib:/opt/lib\ninclude_dirs = /usr/include:/usr/local/include:/opt/include\nsrc_dirs = /usr/local/src:/opt/src\n# search static libraries (.a) in preference to shared ones (.so)\nsearch_static_first = 0\n\n[fftw]\nfftw_libs = rfftw, fftw\nfftw_opt_libs = rfftw_threaded, fftw_threaded\n# if the above aren't found, look for {s,d}fftw_libs and {s,d}fftw_opt_libs\n\n[atlas]\nlibrary_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas\n# for overriding the names of the atlas libraries\natlas_libs = lapack, f77blas, cblas, atlas\n\n[x11]\nlibrary_dirs = /usr/X11R6/lib\ninclude_dirs = /usr/X11R6/include\n----------\n\nAuthors:\n Pearu Peterson , February 2002\n David M. Cooke , April 2002\n\nCopyright 2002 Pearu Peterson all rights reserved,\nPearu Peterson \nPermission to use, modify, and distribute this software is given under the \nterms of the SciPy (BSD style) license. See LICENSE.txt that came with\nthis distribution for specifics.\n\nNO WARRANTY IS EXPRESSED OR IMPLIED. 
USE AT YOUR OWN RISK.\n\"\"\"\n\n__revision__ = '$Id$'\nimport sys,os,re,types\nimport warnings\nfrom distutils.errors import DistutilsError\nfrom glob import glob\nimport ConfigParser\nfrom exec_command import find_executable, exec_command\n\nfrom distutils.sysconfig import get_config_vars\n\nif sys.platform == 'win32':\n default_lib_dirs = ['C:\\\\'] # probably not very helpful...\n default_include_dirs = []\n default_src_dirs = ['.']\n default_x11_lib_dirs = []\n default_x11_include_dirs = []\nelse:\n default_lib_dirs = ['/usr/local/lib', '/opt/lib', '/usr/lib',\n '/sw/lib']\n default_include_dirs = ['/usr/local/include',\n '/opt/include', '/usr/include',\n '/sw/include']\n default_src_dirs = ['.','/usr/local/src', '/opt/src','/sw/src']\n default_x11_lib_dirs = ['/usr/X11R6/lib','/usr/X11/lib','/usr/lib']\n default_x11_include_dirs = ['/usr/X11R6/include','/usr/X11/include',\n '/usr/include']\n\nif os.path.join(sys.prefix, 'lib') not in default_lib_dirs:\n default_lib_dirs.insert(0,os.path.join(sys.prefix, 'lib'))\n default_include_dirs.append(os.path.join(sys.prefix, 'include'))\n default_src_dirs.append(os.path.join(sys.prefix, 'src'))\n\ndefault_lib_dirs = filter(os.path.isdir, default_lib_dirs)\ndefault_include_dirs = filter(os.path.isdir, default_include_dirs)\ndefault_src_dirs = filter(os.path.isdir, default_src_dirs)\n\nso_ext = get_config_vars('SO')[0] or ''\n\ndef get_info(name,notfound_action=0):\n \"\"\"\n notfound_action:\n 0 - do nothing\n 1 - display warning message\n 2 - raise error\n \"\"\"\n cl = {'atlas':atlas_info, # use lapack_opt or blas_opt instead\n 'atlas_threads':atlas_threads_info, # ditto\n 'atlas_blas':atlas_blas_info,\n 'atlas_blas_threads':atlas_blas_threads_info,\n 'lapack_atlas':lapack_atlas_info, # use lapack_opt instead\n 'lapack_atlas_threads':lapack_atlas_threads_info, # ditto\n 'x11':x11_info,\n 'fftw':fftw_info,\n 'dfftw':dfftw_info,\n 'sfftw':sfftw_info,\n 'fftw_threads':fftw_threads_info,\n 
'dfftw_threads':dfftw_threads_info,\n 'sfftw_threads':sfftw_threads_info,\n 'djbfft':djbfft_info,\n 'blas':blas_info, # use blas_opt instead\n 'lapack':lapack_info, # use lapack_opt instead\n 'lapack_src':lapack_src_info,\n 'blas_src':blas_src_info,\n 'numpy':numpy_info,\n 'numarray':numarray_info,\n 'lapack_opt':lapack_opt_info,\n 'blas_opt':blas_opt_info,\n 'boost_python':boost_python_info,\n 'agg2':agg2_info,\n 'wx':wx_info,\n 'gdk_pixbuf_xlib_2':gdk_pixbuf_xlib_2_info,\n 'gdk-pixbuf-xlib-2.0':gdk_pixbuf_xlib_2_info,\n 'gdk_pixbuf_2':gdk_pixbuf_2_info,\n 'gdk-pixbuf-2.0':gdk_pixbuf_2_info,\n 'gdk':gdk_info,\n 'gdk_2':gdk_2_info,\n 'gdk-2.0':gdk_2_info,\n 'gdk_x11_2':gdk_x11_2_info,\n 'gdk-x11-2.0':gdk_x11_2_info,\n 'gtkp_x11_2':gtkp_x11_2_info,\n 'gtk+-x11-2.0':gtkp_x11_2_info,\n 'gtkp_2':gtkp_2_info,\n 'gtk+-2.0':gtkp_2_info,\n 'xft':xft_info,\n 'freetype2':freetype2_info,\n }.get(name.lower(),system_info)\n return cl().get_info(notfound_action)\n\nclass NotFoundError(DistutilsError):\n \"\"\"Some third-party program or library is not found.\"\"\"\n\nclass AtlasNotFoundError(NotFoundError):\n \"\"\"\n Atlas (http://math-atlas.sourceforge.net/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [atlas]) or by setting\n the ATLAS environment variable.\"\"\"\n\nclass LapackNotFoundError(NotFoundError):\n \"\"\"\n Lapack (http://www.netlib.org/lapack/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [lapack]) or by setting\n the LAPACK environment variable.\"\"\"\n\nclass LapackSrcNotFoundError(LapackNotFoundError):\n \"\"\"\n Lapack (http://www.netlib.org/lapack/) sources not found.\n Directories to search for the sources can be specified in the\n scipy_distutils/site.cfg file (section [lapack_src]) or by setting\n the LAPACK_SRC environment variable.\"\"\"\n\nclass BlasNotFoundError(NotFoundError):\n \"\"\"\n 
Blas (http://www.netlib.org/blas/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [blas]) or by setting\n the BLAS environment variable.\"\"\"\n\nclass BlasSrcNotFoundError(BlasNotFoundError):\n \"\"\"\n Blas (http://www.netlib.org/blas/) sources not found.\n Directories to search for the sources can be specified in the\n scipy_distutils/site.cfg file (section [blas_src]) or by setting\n the BLAS_SRC environment variable.\"\"\"\n\nclass FFTWNotFoundError(NotFoundError):\n \"\"\"\n FFTW (http://www.fftw.org/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [fftw]) or by setting\n the FFTW environment variable.\"\"\"\n\nclass DJBFFTNotFoundError(NotFoundError):\n \"\"\"\n DJBFFT (http://cr.yp.to/djbfft.html) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [djbfft]) or by setting\n the DJBFFT environment variable.\"\"\"\n\nclass F2pyNotFoundError(NotFoundError):\n \"\"\"\n f2py2e (http://cens.ioc.ee/projects/f2py2e/) module not found.\n Get it from above location, install it, and retry setup.py.\"\"\"\n\nclass NumericNotFoundError(NotFoundError):\n \"\"\"\n Numeric (http://www.numpy.org/) module not found.\n Get it from above location, install it, and retry setup.py.\"\"\"\n\nclass X11NotFoundError(NotFoundError):\n \"\"\"X11 libraries not found.\"\"\"\n\nclass system_info:\n\n \"\"\" get_info() is the only public method. 
Don't use others.\n \"\"\"\n section = 'DEFAULT'\n dir_env_var = None\n search_static_first = 0 # XXX: disabled by default, may disappear in\n # future unless it is proved to be useful.\n verbosity = 1\n saved_results = {}\n\n notfounderror = NotFoundError\n\n def __init__ (self,\n default_lib_dirs=default_lib_dirs,\n default_include_dirs=default_include_dirs,\n verbosity = 1,\n ):\n self.__class__.info = {}\n self.local_prefixes = []\n defaults = {}\n defaults['libraries'] = ''\n defaults['library_dirs'] = os.pathsep.join(default_lib_dirs)\n defaults['include_dirs'] = os.pathsep.join(default_include_dirs)\n defaults['src_dirs'] = os.pathsep.join(default_src_dirs)\n defaults['search_static_first'] = str(self.search_static_first)\n self.cp = ConfigParser.ConfigParser(defaults)\n try:\n f = __file__\n except NameError,msg:\n f = sys.argv[0]\n cf = os.path.join(os.path.split(os.path.abspath(f))[0],\n 'site.cfg')\n self.cp.read([cf])\n if not self.cp.has_section(self.section):\n self.cp.add_section(self.section)\n self.search_static_first = self.cp.getboolean(self.section,\n 'search_static_first')\n assert isinstance(self.search_static_first, type(0))\n\n def calc_libraries_info(self):\n libs = self.get_libraries()\n dirs = self.get_lib_dirs()\n info = {}\n for lib in libs:\n i = None\n for d in dirs:\n i = self.check_libs(d,[lib]) \n if i is not None:\n break\n if i is not None:\n dict_append(info,**i)\n else:\n print 'Library %s was not found. 
Ignoring' % (lib)\n return info\n\n def set_info(self,**info):\n if info: \n lib_info = self.calc_libraries_info()\n dict_append(info,**lib_info)\n self.saved_results[self.__class__.__name__] = info\n\n def has_info(self):\n return self.saved_results.has_key(self.__class__.__name__)\n\n def get_info(self,notfound_action=0):\n \"\"\" Return a dictonary with items that are compatible\n with scipy_distutils.setup keyword arguments.\n \"\"\"\n flag = 0\n if not self.has_info():\n flag = 1\n if self.verbosity>0:\n print self.__class__.__name__ + ':'\n if hasattr(self, 'calc_info'):\n self.calc_info()\n if notfound_action:\n if not self.has_info():\n if notfound_action==1:\n warnings.warn(self.notfounderror.__doc__)\n elif notfound_action==2:\n raise self.notfounderror,self.notfounderror.__doc__\n else:\n raise ValueError,`notfound_action`\n\n if self.verbosity>0:\n if not self.has_info():\n print ' NOT AVAILABLE'\n self.set_info()\n else:\n print ' FOUND:'\n \n res = self.saved_results.get(self.__class__.__name__)\n if self.verbosity>0 and flag:\n for k,v in res.items():\n v = str(v)\n if k=='sources' and len(v)>200: v = v[:60]+' ...\\n... 
'+v[-60:]\n print ' %s = %s'%(k,v)\n print\n \n return res\n\n def get_paths(self, section, key):\n dirs = self.cp.get(section, key).split(os.pathsep)\n env_var = self.dir_env_var\n if env_var:\n if type(env_var) is type([]):\n e0 = env_var[-1]\n for e in env_var:\n if os.environ.has_key(e):\n e0 = e\n break\n if not env_var[0]==e0:\n print 'Setting %s=%s' % (env_var[0],e0)\n env_var = e0\n if env_var and os.environ.has_key(env_var):\n d = os.environ[env_var]\n if d=='None':\n print 'Disabled',self.__class__.__name__,'(%s is None)' \\\n % (self.dir_env_var)\n return []\n if os.path.isfile(d):\n dirs = [os.path.dirname(d)] + dirs\n l = getattr(self,'_lib_names',[])\n if len(l)==1:\n b = os.path.basename(d)\n b = os.path.splitext(b)[0]\n if b[:3]=='lib':\n print 'Replacing _lib_names[0]==%r with %r' \\\n % (self._lib_names[0], b[3:])\n self._lib_names[0] = b[3:]\n else:\n ds = d.split(os.pathsep)\n ds2 = []\n for d in ds:\n if os.path.isdir(d):\n ds2.append(d)\n for dd in ['include','lib']:\n d1 = os.path.join(d,dd)\n if os.path.isdir(d1):\n ds2.append(d1)\n dirs = ds2 + dirs\n default_dirs = self.cp.get('DEFAULT', key).split(os.pathsep)\n dirs.extend(default_dirs)\n ret = []\n [ret.append(d) for d in dirs if os.path.isdir(d) and d not in ret]\n if self.verbosity>1:\n print '(',key,'=',':'.join(ret),')'\n return ret\n\n def get_lib_dirs(self, key='library_dirs'):\n return self.get_paths(self.section, key)\n\n def get_include_dirs(self, key='include_dirs'):\n return self.get_paths(self.section, key)\n\n def get_src_dirs(self, key='src_dirs'):\n return self.get_paths(self.section, key)\n\n def get_libs(self, key, default):\n try:\n libs = self.cp.get(self.section, key)\n except ConfigParser.NoOptionError:\n if not default:\n return []\n if type(default) is type(''):\n return [default]\n return default\n return [b for b in [a.strip() for a in libs.split(',')] if b]\n\n def get_libraries(self, key='libraries'):\n return self.get_libs(key,'')\n\n def 
check_libs(self,lib_dir,libs,opt_libs =[]):\n \"\"\" If static or shared libraries are available then return\n their info dictionary. \"\"\"\n if self.search_static_first:\n exts = ['.a',so_ext]\n else:\n exts = [so_ext,'.a']\n if sys.platform=='cygwin':\n exts.append('.dll.a')\n for ext in exts:\n info = self._check_libs(lib_dir,libs,opt_libs,ext)\n if info is not None: return info\n return\n\n def _lib_list(self, lib_dir, libs, ext):\n assert type(lib_dir) is type('')\n liblist = []\n for l in libs:\n p = self.combine_paths(lib_dir, 'lib'+l+ext)\n if p:\n assert len(p)==1\n liblist.append(p[0])\n return liblist\n\n def _extract_lib_names(self,libs):\n return [os.path.splitext(os.path.basename(p))[0][3:] \\\n for p in libs]\n\n def _check_libs(self,lib_dir,libs, opt_libs, ext):\n found_libs = self._lib_list(lib_dir, libs, ext)\n if len(found_libs) == len(libs):\n found_libs = self._extract_lib_names(found_libs)\n info = {'libraries' : found_libs, 'library_dirs' : [lib_dir]}\n opt_found_libs = self._lib_list(lib_dir, opt_libs, ext)\n if len(opt_found_libs) == len(opt_libs):\n opt_found_libs = self._extract_lib_names(opt_found_libs)\n info['libraries'].extend(opt_found_libs)\n return info\n\n def combine_paths(self,*args):\n return combine_paths(*args,**{'verbosity':self.verbosity})\n\nclass fftw_info(system_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['rfftw', 'fftw']\n includes = ['fftw.h','rfftw.h']\n macros = [('SCIPY_FFTW_H',None)]\n notfounderror = FFTWNotFoundError\n\n def __init__(self):\n system_info.__init__(self)\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n incl_dirs = self.get_include_dirs()\n incl_dir = None\n libs = self.get_libs(self.section+'_libs', self.libs)\n info = None\n for d in lib_dirs:\n r = self.check_libs(d,libs)\n if r is not None:\n info = r\n break\n if info is not None:\n flag = 0\n for d in incl_dirs:\n if len(self.combine_paths(d,self.includes))==2:\n dict_append(info,include_dirs=[d])\n flag = 1\n 
incl_dirs = [d]\n incl_dir = d\n break\n if flag:\n dict_append(info,define_macros=self.macros)\n else:\n info = None\n if info is not None:\n self.set_info(**info)\n\nclass dfftw_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['drfftw','dfftw']\n includes = ['dfftw.h','drfftw.h']\n macros = [('SCIPY_DFFTW_H',None)]\n\nclass sfftw_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['srfftw','sfftw']\n includes = ['sfftw.h','srfftw.h']\n macros = [('SCIPY_SFFTW_H',None)]\n\nclass fftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['rfftw_threads','fftw_threads']\n includes = ['fftw_threads.h','rfftw_threads.h']\n macros = [('SCIPY_FFTW_THREADS_H',None)]\n\nclass dfftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['drfftw_threads','dfftw_threads']\n includes = ['dfftw_threads.h','drfftw_threads.h']\n macros = [('SCIPY_DFFTW_THREADS_H',None)]\n\nclass sfftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['srfftw_threads','sfftw_threads']\n includes = ['sfftw_threads.h','srfftw_threads.h']\n macros = [('SCIPY_SFFTW_THREADS_H',None)]\n\nclass djbfft_info(system_info):\n section = 'djbfft'\n dir_env_var = 'DJBFFT'\n notfounderror = DJBFFTNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend(self.combine_paths(d,['djbfft'])+[d])\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n incl_dirs = self.get_include_dirs()\n info = None\n for d in lib_dirs:\n p = self.combine_paths (d,['djbfft.a'])\n if p:\n info = {'extra_objects':p}\n break\n p = self.combine_paths (d,['libdjbfft.a'])\n if p:\n info = {'libraries':['djbfft'],'library_dirs':[d]}\n break\n if info is None:\n return\n for d in incl_dirs:\n if len(self.combine_paths(d,['fftc8.h','fftfreq.h']))==2:\n 
dict_append(info,include_dirs=[d],\n define_macros=[('SCIPY_DJBFFT_H',None)])\n self.set_info(**info)\n return\n return\n\nclass atlas_info(system_info):\n section = 'atlas'\n dir_env_var = 'ATLAS'\n _lib_names = ['f77blas','cblas']\n if sys.platform[:7]=='freebsd':\n _lib_atlas = ['atlas_r']\n _lib_lapack = ['alapack_r']\n else:\n _lib_atlas = ['atlas']\n _lib_lapack = ['lapack']\n\n notfounderror = AtlasNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend(self.combine_paths(d,['atlas*','ATLAS*',\n 'sse','3dnow','sse2'])+[d])\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n info = {}\n atlas_libs = self.get_libs('atlas_libs',\n self._lib_names + self._lib_atlas)\n lapack_libs = self.get_libs('lapack_libs',self._lib_lapack)\n atlas = None\n lapack = None\n atlas_1 = None\n for d in lib_dirs:\n atlas = self.check_libs(d,atlas_libs,[])\n lapack_atlas = self.check_libs(d,['lapack_atlas'],[])\n if atlas is not None:\n lib_dirs2 = self.combine_paths(d,['atlas*','ATLAS*'])+[d]\n for d2 in lib_dirs2:\n lapack = self.check_libs(d2,lapack_libs,[])\n if lapack is not None:\n break\n else:\n lapack = None\n if lapack is not None:\n break\n if atlas:\n atlas_1 = atlas\n print self.__class__\n if atlas is None:\n atlas = atlas_1\n if atlas is None:\n return\n include_dirs = self.get_include_dirs()\n h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]\n if h:\n h = os.path.dirname(h)\n dict_append(info,include_dirs=[h])\n info['language'] = 'c'\n if lapack is not None:\n dict_append(info,**lapack)\n dict_append(info,**atlas)\n elif 'lapack_atlas' in atlas['libraries']:\n dict_append(info,**atlas)\n dict_append(info,define_macros=[('ATLAS_WITH_LAPACK_ATLAS',None)])\n self.set_info(**info)\n return\n else:\n dict_append(info,**atlas)\n dict_append(info,define_macros=[('ATLAS_WITHOUT_LAPACK',None)])\n 
message = \"\"\"\n*********************************************************************\n Could not find lapack library within the ATLAS installation.\n*********************************************************************\n\"\"\"\n warnings.warn(message)\n self.set_info(**info)\n return\n \n # Check if lapack library is complete, only warn if it is not.\n lapack_dir = lapack['library_dirs'][0]\n lapack_name = lapack['libraries'][0]\n lapack_lib = None\n for e in ['.a',so_ext]:\n fn = os.path.join(lapack_dir,'lib'+lapack_name+e)\n if os.path.exists(fn):\n lapack_lib = fn\n break\n if lapack_lib is not None:\n sz = os.stat(lapack_lib)[6]\n if sz <= 4000*1024:\n message = \"\"\"\n*********************************************************************\n Lapack library (from ATLAS) is probably incomplete:\n size of %s is %sk (expected >4000k)\n\n Follow the instructions in the KNOWN PROBLEMS section of the file\n scipy/INSTALL.txt.\n*********************************************************************\n\"\"\" % (lapack_lib,sz/1024)\n warnings.warn(message)\n else:\n info['language'] = 'f77'\n\n self.set_info(**info)\n\nclass atlas_blas_info(atlas_info):\n _lib_names = ['f77blas','cblas']\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n info = {}\n atlas_libs = self.get_libs('atlas_libs',\n self._lib_names + self._lib_atlas)\n atlas = None\n for d in lib_dirs:\n atlas = self.check_libs(d,atlas_libs,[])\n if atlas is not None:\n break\n if atlas is None:\n return\n include_dirs = self.get_include_dirs()\n h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]\n if h:\n h = os.path.dirname(h)\n dict_append(info,include_dirs=[h])\n info['language'] = 'c'\n\n dict_append(info,**atlas)\n\n self.set_info(**info)\n return\n\n\nclass atlas_threads_info(atlas_info):\n dir_env_var = ['PTATLAS','ATLAS']\n _lib_names = ['ptf77blas','ptcblas']\n\nclass atlas_blas_threads_info(atlas_blas_info):\n dir_env_var = ['PTATLAS','ATLAS']\n _lib_names = 
['ptf77blas','ptcblas']\n\nclass lapack_atlas_info(atlas_info):\n _lib_names = ['lapack_atlas'] + atlas_info._lib_names\n\nclass lapack_atlas_threads_info(atlas_threads_info):\n _lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names\n\nclass lapack_info(system_info):\n section = 'lapack'\n dir_env_var = 'LAPACK'\n _lib_names = ['lapack']\n notfounderror = LapackNotFoundError\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n\n lapack_libs = self.get_libs('lapack_libs', self._lib_names)\n for d in lib_dirs:\n lapack = self.check_libs(d,lapack_libs,[])\n if lapack is not None:\n info = lapack \n break\n else:\n return\n info['language'] = 'f77'\n self.set_info(**info)\n\nclass lapack_src_info(system_info):\n section = 'lapack_src'\n dir_env_var = 'LAPACK_SRC'\n notfounderror = LapackSrcNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['LAPACK*/SRC','SRC']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'dgesv.f')):\n src_dir = d\n break\n if not src_dir:\n #XXX: Get sources from netlib. 
May be ask first.\n return\n # The following is extracted from LAPACK-3.0/SRC/Makefile\n allaux='''\n ilaenv ieeeck lsame lsamen xerbla\n ''' # *.f\n laux = '''\n bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1\n laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2\n lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre\n larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4\n lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1\n lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf\n stebz stedc steqr sterf\n ''' # [s|d]*.f\n lasrc = '''\n gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak\n gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv\n gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2\n geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd\n gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal\n gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd\n ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein\n hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0\n lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb\n lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp\n laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv\n lartv larz larzb larzt laswp lasyf latbs latdf latps latrd\n latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv\n pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2\n potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri\n pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs\n spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv\n sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2\n tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs\n trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs\n tzrqf tzrzf\n ''' # [s|c|d|z]*.f\n sd_lasrc = '''\n laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l\n org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr\n orm2l 
orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3\n ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx\n sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd\n stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd\n sygvx sytd2 sytrd\n ''' # [s|d]*.f\n cz_lasrc = '''\n bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev\n heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv\n hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd\n hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf\n hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7\n laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe\n laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv\n spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq\n ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2\n unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr\n ''' # [c|z]*.f\n #######\n sclaux = laux + ' econd ' # s*.f\n dzlaux = laux + ' secnd ' # d*.f\n slasrc = lasrc + sd_lasrc # s*.f\n dlasrc = lasrc + sd_lasrc # d*.f\n clasrc = lasrc + cz_lasrc + ' srot srscl ' # c*.f\n zlasrc = lasrc + cz_lasrc + ' drot drscl ' # z*.f\n oclasrc = ' icmax1 scsum1 ' # *.f\n ozlasrc = ' izmax1 dzsum1 ' # *.f\n sources = ['s%s.f'%f for f in (sclaux+slasrc).split()] \\\n + ['d%s.f'%f for f in (dzlaux+dlasrc).split()] \\\n + ['c%s.f'%f for f in (clasrc).split()] \\\n + ['z%s.f'%f for f in (zlasrc).split()] \\\n + ['%s.f'%f for f in (allaux+oclasrc+ozlasrc).split()]\n sources = [os.path.join(src_dir,f) for f in sources]\n #XXX: should we check here actual existence of source files?\n info = {'sources':sources,'language':'f77'}\n self.set_info(**info)\n\natlas_version_c_text = r'''\n/* This file is generated from scipy_distutils/system_info.py */\n#ifdef __CPLUSPLUS__\nextern \"C\" {\n#endif\n#include \"Python.h\"\nstatic PyMethodDef module_methods[] = { {NULL,NULL} };\nDL_EXPORT(void) initatlas_version(void) {\n void 
ATL_buildinfo(void);\n ATL_buildinfo();\n Py_InitModule(\"atlas_version\", module_methods);\n}\n#ifdef __CPLUSCPLUS__\n}\n#endif\n'''\n\ndef get_atlas_version(**config):\n from core import Extension, setup\n from misc_util import get_build_temp\n import log\n magic = hex(hash(`config`))\n def atlas_version_c(extension, build_dir,magic=magic):\n source = os.path.join(build_dir,'atlas_version_%s.c' % (magic))\n if os.path.isfile(source):\n from distutils.dep_util import newer\n if newer(source,__file__):\n return source\n f = open(source,'w')\n f.write(atlas_version_c_text)\n f.close()\n return source\n ext = Extension('atlas_version',\n sources=[atlas_version_c],\n **config)\n extra_args = ['--build-lib',get_build_temp()]\n for a in sys.argv:\n if re.match('[-][-]compiler[=]',a):\n extra_args.append(a)\n try:\n dist = setup(ext_modules=[ext],\n script_name = 'get_atlas_version',\n script_args = ['build_src','build_ext']+extra_args)\n except Exception,msg:\n print \"##### msg: %s\" % msg\n if not msg:\n msg = \"Unknown Exception\"\n log.warn(msg)\n return None\n\n from distutils.sysconfig import get_config_var\n so_ext = get_config_var('SO')\n build_ext = dist.get_command_obj('build_ext')\n target = os.path.join(build_ext.build_lib,'atlas_version'+so_ext)\n from exec_command import exec_command,get_pythonexe\n cmd = [get_pythonexe(),'-c',\n '\"import imp;imp.load_dynamic(\\\\\"atlas_version\\\\\",\\\\\"%s\\\\\")\"'\\\n % (os.path.basename(target))]\n s,o = exec_command(cmd,execute_in=os.path.dirname(target),use_tee=0)\n atlas_version = None\n if not s:\n m = re.search(r'ATLAS version (?P\\d+[.]\\d+[.]\\d+)',o)\n if m:\n atlas_version = m.group('version')\n if atlas_version is None:\n if re.search(r'undefined symbol: ATL_buildinfo',o,re.M):\n atlas_version = '3.2.1_pre3.3.6'\n else:\n print 'Command:',' '.join(cmd)\n print 'Status:',s\n print 'Output:',o\n return atlas_version\n\n\nclass lapack_opt_info(system_info):\n \n def calc_info(self):\n\n if 
sys.platform=='darwin' and not os.environ.get('ATLAS',None):\n args = []\n link_args = []\n if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):\n args.extend(['-faltivec','-framework','Accelerate'])\n link_args.extend(['-Wl,-framework','-Wl,Accelerate'])\n elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):\n args.extend(['-faltivec','-framework','vecLib'])\n link_args.extend(['-Wl,-framework','-Wl,vecLib'])\n if args:\n self.set_info(extra_compile_args=args,\n extra_link_args=link_args,\n define_macros=[('NO_ATLAS_INFO',3)])\n return\n\n atlas_info = get_info('atlas_threads')\n if not atlas_info:\n atlas_info = get_info('atlas')\n #atlas_info = {} ## uncomment for testing\n atlas_version = None\n need_lapack = 0\n need_blas = 0\n info = {}\n if atlas_info:\n version_info = atlas_info.copy()\n atlas_version = get_atlas_version(**version_info)\n if not atlas_info.has_key('define_macros'):\n atlas_info['define_macros'] = []\n if atlas_version is None:\n atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))\n else:\n atlas_info['define_macros'].append(('ATLAS_INFO',\n '\"\\\\\"%s\\\\\"\"' % atlas_version))\n\t\tif atlas_version=='3.2.1_pre3.3.6':\n\t\t atlas_info['define_macros'].append(('NO_ATLAS_INFO',4))\n l = atlas_info.get('define_macros',[])\n if ('ATLAS_WITH_LAPACK_ATLAS',None) in l \\\n or ('ATLAS_WITHOUT_LAPACK',None) in l:\n need_lapack = 1\n info = atlas_info\n else:\n warnings.warn(AtlasNotFoundError.__doc__)\n need_blas = 1\n need_lapack = 1\n dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])\n\n if need_lapack:\n lapack_info = get_info('lapack')\n #lapack_info = {} ## uncomment for testing\n if lapack_info:\n dict_append(info,**lapack_info)\n else:\n warnings.warn(LapackNotFoundError.__doc__)\n lapack_src_info = get_info('lapack_src')\n if not lapack_src_info:\n warnings.warn(LapackSrcNotFoundError.__doc__)\n return\n dict_append(info,libraries=[('flapack_src',lapack_src_info)])\n\n if need_blas:\n blas_info = 
get_info('blas')\n #blas_info = {} ## uncomment for testing\n if blas_info:\n dict_append(info,**blas_info)\n else:\n warnings.warn(BlasNotFoundError.__doc__)\n blas_src_info = get_info('blas_src')\n if not blas_src_info:\n warnings.warn(BlasSrcNotFoundError.__doc__)\n return\n dict_append(info,libraries=[('fblas_src',blas_src_info)])\n\n self.set_info(**info)\n return\n\n\nclass blas_opt_info(system_info):\n \n def calc_info(self):\n\n if sys.platform=='darwin' and not os.environ.get('ATLAS',None):\n args = []\n link_args = []\n if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):\n args.extend(['-faltivec','-framework','Accelerate'])\n link_args.extend(['-Wl,-framework','-Wl,Accelerate'])\n elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):\n args.extend(['-faltivec','-framework','vecLib'])\n link_args.extend(['-Wl,-framework','-Wl,vecLib'])\n if args:\n self.set_info(extra_compile_args=args,\n extra_link_args=link_args,\n define_macros=[('NO_ATLAS_INFO',3)])\n return\n\n atlas_info = get_info('atlas_blas_threads')\n if not atlas_info:\n atlas_info = get_info('atlas_blas')\n atlas_version = None\n need_blas = 0\n info = {}\n if atlas_info:\n version_info = atlas_info.copy()\n atlas_version = get_atlas_version(**version_info)\n if not atlas_info.has_key('define_macros'):\n atlas_info['define_macros'] = []\n if atlas_version is None:\n atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))\n else:\n atlas_info['define_macros'].append(('ATLAS_INFO',\n '\"\\\\\"%s\\\\\"\"' % atlas_version))\n info = atlas_info\n else:\n warnings.warn(AtlasNotFoundError.__doc__)\n need_blas = 1\n dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])\n\n if need_blas:\n blas_info = get_info('blas')\n if blas_info:\n dict_append(info,**blas_info)\n else:\n warnings.warn(BlasNotFoundError.__doc__)\n blas_src_info = get_info('blas_src')\n if not blas_src_info:\n warnings.warn(BlasSrcNotFoundError.__doc__)\n return\n 
dict_append(info,libraries=[('fblas_src',blas_src_info)])\n\n self.set_info(**info)\n return\n\n\nclass blas_info(system_info):\n section = 'blas'\n dir_env_var = 'BLAS'\n _lib_names = ['blas']\n notfounderror = BlasNotFoundError\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n\n blas_libs = self.get_libs('blas_libs', self._lib_names)\n for d in lib_dirs:\n blas = self.check_libs(d,blas_libs,[])\n if blas is not None:\n info = blas \n break\n else:\n return\n info['language'] = 'f77' # XXX: is it generally true?\n self.set_info(**info)\n\n\nclass blas_src_info(system_info):\n section = 'blas_src'\n dir_env_var = 'BLAS_SRC'\n notfounderror = BlasSrcNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['blas']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'daxpy.f')):\n src_dir = d\n break\n if not src_dir:\n #XXX: Get sources from netlib. 
May be ask first.\n return\n blas1 = '''\n caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot\n dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2\n srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg\n dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax\n snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap\n '''\n blas2 = '''\n cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv\n chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv\n dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv\n sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger\n stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc\n zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2\n ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv\n '''\n blas3 = '''\n cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k\n dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm\n ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm\n '''\n sources = [os.path.join(src_dir,f+'.f') \\\n for f in (blas1+blas2+blas3).split()]\n #XXX: should we check here actual existence of source files?\n info = {'sources':sources,'language':'f77'}\n self.set_info(**info)\n\nclass x11_info(system_info):\n section = 'x11'\n notfounderror = X11NotFoundError\n\n def __init__(self):\n system_info.__init__(self,\n default_lib_dirs=default_x11_lib_dirs,\n default_include_dirs=default_x11_include_dirs)\n\n def calc_info(self):\n if sys.platform in ['win32']:\n return\n lib_dirs = self.get_lib_dirs()\n include_dirs = self.get_include_dirs()\n x11_libs = self.get_libs('x11_libs', ['X11'])\n for lib_dir in lib_dirs:\n info = self.check_libs(lib_dir, x11_libs, [])\n if info is not None:\n break\n else:\n return\n inc_dir = None\n for d in include_dirs:\n if self.combine_paths(d, 'X11/X.h'):\n inc_dir = d\n break\n if inc_dir is not None:\n dict_append(info, include_dirs=[inc_dir])\n self.set_info(**info)\n\nclass numpy_info(system_info):\n 
section = 'numpy'\n modulename = 'Numeric'\n notfounderror = NumericNotFoundError\n\n def __init__(self):\n from distutils.sysconfig import get_python_inc\n include_dirs = []\n try:\n module = __import__(self.modulename)\n prefix = []\n for name in module.__file__.split(os.sep):\n if name=='lib':\n break\n prefix.append(name)\n include_dirs.append(get_python_inc(prefix=os.sep.join(prefix)))\n except ImportError:\n pass\n py_incl_dir = get_python_inc()\n include_dirs.append(py_incl_dir)\n for d in default_include_dirs:\n d = os.path.join(d, os.path.basename(py_incl_dir))\n if d not in include_dirs:\n include_dirs.append(d)\n system_info.__init__(self,\n default_lib_dirs=[],\n default_include_dirs=include_dirs)\n\n def calc_info(self):\n try:\n module = __import__(self.modulename)\n except ImportError:\n return\n info = {}\n macros = [(self.modulename.upper()+'_VERSION',\n '\"\\\\\"%s\\\\\"\"' % (module.__version__))]\n## try:\n## macros.append(\n## (self.modulename.upper()+'_VERSION_HEX',\n## hex(vstr2hex(module.__version__))),\n## )\n## except Exception,msg:\n## print msg\n dict_append(info, define_macros = macros)\n include_dirs = self.get_include_dirs()\n inc_dir = None\n for d in include_dirs:\n if self.combine_paths(d,\n os.path.join(self.modulename,\n 'arrayobject.h')):\n inc_dir = d\n break\n if inc_dir is not None:\n dict_append(info, include_dirs=[inc_dir])\n if info:\n self.set_info(**info)\n return\n\nclass numarray_info(numpy_info):\n section = 'numarray'\n modulename = 'numarray'\n\nclass boost_python_info(system_info):\n section = 'boost_python'\n dir_env_var = 'BOOST'\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['boost*']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n from distutils.sysconfig import get_python_inc\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if 
os.path.isfile(os.path.join(d,'libs','python','src','module.cpp')):\n src_dir = d\n break\n if not src_dir:\n return\n py_incl_dir = get_python_inc()\n srcs_dir = os.path.join(src_dir,'libs','python','src')\n bpl_srcs = glob(os.path.join(srcs_dir,'*.cpp'))\n bpl_srcs += glob(os.path.join(srcs_dir,'*','*.cpp'))\n info = {'libraries':[('boost_python_src',{'include_dirs':[src_dir,py_incl_dir],\n 'sources':bpl_srcs})],\n 'include_dirs':[src_dir],\n }\n if info:\n self.set_info(**info)\n return\n\nclass agg2_info(system_info):\n section = 'agg2'\n dir_env_var = 'AGG2'\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['agg2*']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'src','agg_affine_matrix.cpp')):\n src_dir = d\n break\n if not src_dir:\n return\n if sys.platform=='win32':\n agg2_srcs = glob(os.path.join(src_dir,'src','platform','win32','agg_win32_bmp.cpp'))\n else:\n agg2_srcs = glob(os.path.join(src_dir,'src','*.cpp'))\n agg2_srcs += [os.path.join(src_dir,'src','platform','X11','agg_platform_support.cpp')]\n \n info = {'libraries':[('agg2_src',{'sources':agg2_srcs,\n 'include_dirs':[os.path.join(src_dir,'include')],\n })],\n 'include_dirs':[os.path.join(src_dir,'include')],\n }\n if info:\n self.set_info(**info)\n return\n\nclass _pkg_config_info(system_info):\n section = None\n config_env_var = 'PKG_CONFIG'\n default_config_exe = 'pkg-config'\n append_config_exe = ''\n version_macro_name = None\n release_macro_name = None\n version_flag = '--modversion'\n cflags_flag = '--cflags'\n\n def get_config_exe(self):\n if os.environ.has_key(self.config_env_var):\n return os.environ[self.config_env_var]\n return self.default_config_exe\n def get_config_output(self, config_exe, option):\n s,o = exec_command(config_exe+' 
'+self.append_config_exe+' '+option,use_tee=0)\n if not s:\n return o\n\n def calc_info(self):\n config_exe = find_executable(self.get_config_exe())\n if not os.path.isfile(config_exe):\n print 'File not found: %s. Cannot determine %s info.' \\\n % (config_exe, self.section)\n return\n info = {}\n macros = []\n libraries = []\n library_dirs = []\n include_dirs = []\n extra_link_args = []\n extra_compile_args = []\n version = self.get_config_output(config_exe,self.version_flag)\n if version:\n macros.append((self.__class__.__name__.split('.')[-1].upper(),\n '\"\\\\\"%s\\\\\"\"' % (version)))\n if self.version_macro_name:\n macros.append((self.version_macro_name+'_%s' % (version.replace('.','_')),None))\n if self.release_macro_name:\n release = self.get_config_output(config_exe,'--release')\n if release:\n macros.append((self.release_macro_name+'_%s' % (release.replace('.','_')),None))\n opts = self.get_config_output(config_exe,'--libs')\n if opts:\n for opt in opts.split():\n if opt[:2]=='-l':\n libraries.append(opt[2:])\n elif opt[:2]=='-L':\n library_dirs.append(opt[2:])\n else:\n extra_link_args.append(opt)\n opts = self.get_config_output(config_exe,self.cflags_flag)\n if opts:\n for opt in opts.split():\n if opt[:2]=='-I':\n include_dirs.append(opt[2:])\n elif opt[:2]=='-D':\n if '=' in opt:\n n,v = opt[2:].split('=')\n macros.append((n,v))\n else:\n macros.append((opt[2:],None))\n else:\n extra_compile_args.append(opt)\n if macros: dict_append(info, define_macros = macros)\n if libraries: dict_append(info, libraries = libraries)\n if library_dirs: dict_append(info, library_dirs = library_dirs)\n if include_dirs: dict_append(info, include_dirs = include_dirs)\n if extra_link_args: dict_append(info, extra_link_args = extra_link_args)\n if extra_compile_args: dict_append(info, extra_compile_args = extra_compile_args)\n if info:\n self.set_info(**info)\n return\n\nclass wx_info(_pkg_config_info):\n section = 'wx'\n config_env_var = 'WX_CONFIG'\n default_config_exe 
= 'wx-config'\n append_config_exe = ''\n version_macro_name = 'WX_VERSION'\n release_macro_name = 'WX_RELEASE'\n version_flag = '--version'\n cflags_flag = '--cxxflags'\n\nclass gdk_pixbuf_xlib_2_info(_pkg_config_info):\n section = 'gdk_pixbuf_xlib_2'\n append_config_exe = 'gdk-pixbuf-xlib-2.0'\n version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'\n\nclass gdk_pixbuf_2_info(_pkg_config_info):\n section = 'gdk_pixbuf_2'\n append_config_exe = 'gdk-pixbuf-2.0'\n version_macro_name = 'GDK_PIXBUF_VERSION'\n\nclass gdk_x11_2_info(_pkg_config_info):\n section = 'gdk_x11_2'\n append_config_exe = 'gdk-x11-2.0'\n version_macro_name = 'GDK_X11_VERSION'\n\nclass gdk_2_info(_pkg_config_info):\n section = 'gdk_2'\n append_config_exe = 'gdk-2.0'\n version_macro_name = 'GDK_VERSION'\n\nclass gdk_info(_pkg_config_info):\n section = 'gdk'\n append_config_exe = 'gdk'\n version_macro_name = 'GDK_VERSION'\n\nclass gtkp_x11_2_info(_pkg_config_info):\n section = 'gtkp_x11_2'\n append_config_exe = 'gtk+-x11-2.0'\n version_macro_name = 'GTK_X11_VERSION'\n\n\nclass gtkp_2_info(_pkg_config_info):\n section = 'gtkp_2'\n append_config_exe = 'gtk+-2.0'\n version_macro_name = 'GTK_VERSION'\n\nclass xft_info(_pkg_config_info):\n section = 'xft'\n append_config_exe = 'xft'\n version_macro_name = 'XFT_VERSION'\n\nclass freetype2_info(_pkg_config_info):\n section = 'freetype2'\n append_config_exe = 'freetype2'\n version_macro_name = 'FREETYPE2_VERSION'\n\n## def vstr2hex(version):\n## bits = []\n## n = [24,16,8,4,0]\n## r = 0\n## for s in version.split('.'):\n## r |= int(s) << n[0]\n## del n[0]\n## return r\n\n#--------------------------------------------------------------------\n\ndef combine_paths(*args,**kws):\n \"\"\" Return a list of existing paths composed by all combinations of\n items from arguments.\n \"\"\"\n r = []\n for a in args:\n if not a: continue\n if type(a) is types.StringType:\n a = [a]\n r.append(a)\n args = r\n if not args: return []\n if len(args)==1:\n result = reduce(lambda 
a,b:a+b,map(glob,args[0]),[])\n elif len (args)==2:\n result = []\n for a0 in args[0]:\n for a1 in args[1]:\n result.extend(glob(os.path.join(a0,a1)))\n else:\n result = combine_paths(*(combine_paths(args[0],args[1])+args[2:]))\n verbosity = kws.get('verbosity',1)\n if verbosity>1 and result:\n print '(','paths:',','.join(result),')'\n return result\n\nlanguage_map = {'c':0,'c++':1,'f77':2,'f90':3}\ninv_language_map = {0:'c',1:'c++',2:'f77',3:'f90'}\ndef dict_append(d,**kws):\n languages = []\n for k,v in kws.items():\n if k=='language':\n languages.append(v)\n continue\n if d.has_key(k):\n if k in ['library_dirs','include_dirs','define_macros']:\n [d[k].append(vv) for vv in v if vv not in d[k]]\n else:\n d[k].extend(v)\n else:\n d[k] = v\n if languages:\n l = inv_language_map[max([language_map.get(l,0) for l in languages])]\n d['language'] = l\n return\n\ndef show_all():\n import system_info\n import pprint\n match_info = re.compile(r'.*?_info').match\n show_only = []\n for n in sys.argv[1:]:\n if n[-5:] != '_info':\n n = n + '_info'\n show_only.append(n)\n show_all = not show_only\n for n in filter(match_info,dir(system_info)):\n if n in ['system_info','get_info']: continue\n if not show_all:\n if n not in show_only: continue\n del show_only[show_only.index(n)]\n c = getattr(system_info,n)()\n c.verbosity = 2\n r = c.get_info()\n if show_only:\n print 'Info classes not defined:',','.join(show_only)\nif __name__ == \"__main__\":\n show_all()\n", "source_code_before": "#!/usr/bin/env python\n\"\"\"\nThis file defines a set of system_info classes for getting\ninformation about various resources (libraries, library directories,\ninclude directories, etc.) in the system. 
Currently, the following\nclasses are available:\n\n atlas_info\n atlas_threads_info\n atlas_blas_info\n atlas_blas_threads_info\n lapack_atlas_info\n blas_info\n lapack_info\n blas_opt_info # usage recommended\n lapack_opt_info # usage recommended\n fftw_info,dfftw_info,sfftw_info\n fftw_threads_info,dfftw_threads_info,sfftw_threads_info\n djbfft_info\n x11_info\n lapack_src_info\n blas_src_info\n numpy_info\n numarray_info\n boost_python_info\n agg2_info\n wx_info\n gdk_pixbuf_xlib_2_info\n gdk_pixbuf_2_info\n gdk_x11_2_info\n gtkp_x11_2_info\n gtkp_2_info\n xft_info\n freetype2_info\n\nUsage:\n info_dict = get_info()\n where is a string 'atlas','x11','fftw','lapack','blas',\n 'lapack_src', 'blas_src', etc. For a complete list of allowed names,\n see the definition of get_info() function below.\n\n Returned info_dict is a dictionary which is compatible with\n distutils.setup keyword arguments. If info_dict == {}, then the\n asked resource is not available (system_info could not find it).\n\n Several *_info classes specify an environment variable to specify\n the locations of software. When setting the corresponding environment\n variable to 'None' then the software will be ignored, even when it\n is available in system.\n\nGlobal parameters:\n system_info.search_static_first - search static libraries (.a)\n in precedence to shared ones (.so, .sl) if enabled.\n system_info.verbosity - output the results to stdout if enabled.\n\nThe file 'site.cfg' in the same directory as this module is read\nfor configuration options. The format is that used by ConfigParser (i.e.,\nWindows .INI style). The section DEFAULT has options that are the default\nfor each section. The available sections are fftw, atlas, and x11. Appropiate\ndefaults are used if nothing is specified.\n\nThe order of finding the locations of resources is the following:\n 1. environment variable\n 2. section in site.cfg\n 3. 
DEFAULT section in site.cfg\nOnly the first complete match is returned.\n\nExample:\n----------\n[DEFAULT]\nlibrary_dirs = /usr/lib:/usr/local/lib:/opt/lib\ninclude_dirs = /usr/include:/usr/local/include:/opt/include\nsrc_dirs = /usr/local/src:/opt/src\n# search static libraries (.a) in preference to shared ones (.so)\nsearch_static_first = 0\n\n[fftw]\nfftw_libs = rfftw, fftw\nfftw_opt_libs = rfftw_threaded, fftw_threaded\n# if the above aren't found, look for {s,d}fftw_libs and {s,d}fftw_opt_libs\n\n[atlas]\nlibrary_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas\n# for overriding the names of the atlas libraries\natlas_libs = lapack, f77blas, cblas, atlas\n\n[x11]\nlibrary_dirs = /usr/X11R6/lib\ninclude_dirs = /usr/X11R6/include\n----------\n\nAuthors:\n Pearu Peterson , February 2002\n David M. Cooke , April 2002\n\nCopyright 2002 Pearu Peterson all rights reserved,\nPearu Peterson \nPermission to use, modify, and distribute this software is given under the \nterms of the SciPy (BSD style) license. See LICENSE.txt that came with\nthis distribution for specifics.\n\nNO WARRANTY IS EXPRESSED OR IMPLIED. 
USE AT YOUR OWN RISK.\n\"\"\"\n\n__revision__ = '$Id$'\nimport sys,os,re,types\nimport warnings\nfrom distutils.errors import DistutilsError\nfrom glob import glob\nimport ConfigParser\nfrom exec_command import find_executable, exec_command\n\nfrom distutils.sysconfig import get_config_vars\n\nif sys.platform == 'win32':\n default_lib_dirs = ['C:\\\\'] # probably not very helpful...\n default_include_dirs = []\n default_src_dirs = ['.']\n default_x11_lib_dirs = []\n default_x11_include_dirs = []\nelse:\n default_lib_dirs = ['/usr/local/lib', '/opt/lib', '/usr/lib',\n '/sw/lib']\n default_include_dirs = ['/usr/local/include',\n '/opt/include', '/usr/include',\n '/sw/include']\n default_src_dirs = ['.','/usr/local/src', '/opt/src','/sw/src']\n default_x11_lib_dirs = ['/usr/X11R6/lib','/usr/X11/lib','/usr/lib']\n default_x11_include_dirs = ['/usr/X11R6/include','/usr/X11/include',\n '/usr/include']\n\nif os.path.join(sys.prefix, 'lib') not in default_lib_dirs:\n default_lib_dirs.insert(0,os.path.join(sys.prefix, 'lib'))\n default_include_dirs.append(os.path.join(sys.prefix, 'include'))\n default_src_dirs.append(os.path.join(sys.prefix, 'src'))\n\ndefault_lib_dirs = filter(os.path.isdir, default_lib_dirs)\ndefault_include_dirs = filter(os.path.isdir, default_include_dirs)\ndefault_src_dirs = filter(os.path.isdir, default_src_dirs)\n\nso_ext = get_config_vars('SO')[0] or ''\n\ndef get_info(name,notfound_action=0):\n \"\"\"\n notfound_action:\n 0 - do nothing\n 1 - display warning message\n 2 - raise error\n \"\"\"\n cl = {'atlas':atlas_info, # use lapack_opt or blas_opt instead\n 'atlas_threads':atlas_threads_info, # ditto\n 'atlas_blas':atlas_blas_info,\n 'atlas_blas_threads':atlas_blas_threads_info,\n 'lapack_atlas':lapack_atlas_info, # use lapack_opt instead\n 'lapack_atlas_threads':lapack_atlas_threads_info, # ditto\n 'x11':x11_info,\n 'fftw':fftw_info,\n 'dfftw':dfftw_info,\n 'sfftw':sfftw_info,\n 'fftw_threads':fftw_threads_info,\n 
'dfftw_threads':dfftw_threads_info,\n 'sfftw_threads':sfftw_threads_info,\n 'djbfft':djbfft_info,\n 'blas':blas_info, # use blas_opt instead\n 'lapack':lapack_info, # use lapack_opt instead\n 'lapack_src':lapack_src_info,\n 'blas_src':blas_src_info,\n 'numpy':numpy_info,\n 'numarray':numarray_info,\n 'lapack_opt':lapack_opt_info,\n 'blas_opt':blas_opt_info,\n 'boost_python':boost_python_info,\n 'agg2':agg2_info,\n 'wx':wx_info,\n 'gdk_pixbuf_xlib_2':gdk_pixbuf_xlib_2_info,\n 'gdk-pixbuf-xlib-2.0':gdk_pixbuf_xlib_2_info,\n 'gdk_pixbuf_2':gdk_pixbuf_2_info,\n 'gdk-pixbuf-2.0':gdk_pixbuf_2_info,\n 'gdk':gdk_info,\n 'gdk_2':gdk_2_info,\n 'gdk-2.0':gdk_2_info,\n 'gdk_x11_2':gdk_x11_2_info,\n 'gdk-x11-2.0':gdk_x11_2_info,\n 'gtkp_x11_2':gtkp_x11_2_info,\n 'gtk+-x11-2.0':gtkp_x11_2_info,\n 'gtkp_2':gtkp_2_info,\n 'gtk+-2.0':gtkp_2_info,\n 'xft':xft_info,\n 'freetype2':freetype2_info,\n }.get(name.lower(),system_info)\n return cl().get_info(notfound_action)\n\nclass NotFoundError(DistutilsError):\n \"\"\"Some third-party program or library is not found.\"\"\"\n\nclass AtlasNotFoundError(NotFoundError):\n \"\"\"\n Atlas (http://math-atlas.sourceforge.net/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [atlas]) or by setting\n the ATLAS environment variable.\"\"\"\n\nclass LapackNotFoundError(NotFoundError):\n \"\"\"\n Lapack (http://www.netlib.org/lapack/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [lapack]) or by setting\n the LAPACK environment variable.\"\"\"\n\nclass LapackSrcNotFoundError(LapackNotFoundError):\n \"\"\"\n Lapack (http://www.netlib.org/lapack/) sources not found.\n Directories to search for the sources can be specified in the\n scipy_distutils/site.cfg file (section [lapack_src]) or by setting\n the LAPACK_SRC environment variable.\"\"\"\n\nclass BlasNotFoundError(NotFoundError):\n \"\"\"\n 
Blas (http://www.netlib.org/blas/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [blas]) or by setting\n the BLAS environment variable.\"\"\"\n\nclass BlasSrcNotFoundError(BlasNotFoundError):\n \"\"\"\n Blas (http://www.netlib.org/blas/) sources not found.\n Directories to search for the sources can be specified in the\n scipy_distutils/site.cfg file (section [blas_src]) or by setting\n the BLAS_SRC environment variable.\"\"\"\n\nclass FFTWNotFoundError(NotFoundError):\n \"\"\"\n FFTW (http://www.fftw.org/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [fftw]) or by setting\n the FFTW environment variable.\"\"\"\n\nclass DJBFFTNotFoundError(NotFoundError):\n \"\"\"\n DJBFFT (http://cr.yp.to/djbfft.html) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [djbfft]) or by setting\n the DJBFFT environment variable.\"\"\"\n\nclass F2pyNotFoundError(NotFoundError):\n \"\"\"\n f2py2e (http://cens.ioc.ee/projects/f2py2e/) module not found.\n Get it from above location, install it, and retry setup.py.\"\"\"\n\nclass NumericNotFoundError(NotFoundError):\n \"\"\"\n Numeric (http://www.numpy.org/) module not found.\n Get it from above location, install it, and retry setup.py.\"\"\"\n\nclass X11NotFoundError(NotFoundError):\n \"\"\"X11 libraries not found.\"\"\"\n\nclass system_info:\n\n \"\"\" get_info() is the only public method. 
Don't use others.\n \"\"\"\n section = 'DEFAULT'\n dir_env_var = None\n search_static_first = 0 # XXX: disabled by default, may disappear in\n # future unless it is proved to be useful.\n verbosity = 1\n saved_results = {}\n\n notfounderror = NotFoundError\n\n def __init__ (self,\n default_lib_dirs=default_lib_dirs,\n default_include_dirs=default_include_dirs,\n verbosity = 1,\n ):\n self.__class__.info = {}\n self.local_prefixes = []\n defaults = {}\n defaults['libraries'] = ''\n defaults['library_dirs'] = os.pathsep.join(default_lib_dirs)\n defaults['include_dirs'] = os.pathsep.join(default_include_dirs)\n defaults['src_dirs'] = os.pathsep.join(default_src_dirs)\n defaults['search_static_first'] = str(self.search_static_first)\n self.cp = ConfigParser.ConfigParser(defaults)\n try:\n __file__\n except NameError:\n __file__ = sys.argv[0]\n cf = os.path.join(os.path.split(os.path.abspath(__file__))[0],\n 'site.cfg')\n self.cp.read([cf])\n if not self.cp.has_section(self.section):\n self.cp.add_section(self.section)\n self.search_static_first = self.cp.getboolean(self.section,\n 'search_static_first')\n assert isinstance(self.search_static_first, type(0))\n\n def calc_libraries_info(self):\n libs = self.get_libraries()\n dirs = self.get_lib_dirs()\n info = {}\n for lib in libs:\n i = None\n for d in dirs:\n i = self.check_libs(d,[lib]) \n if i is not None:\n break\n if i is not None:\n dict_append(info,**i)\n else:\n print 'Library %s was not found. 
Ignoring' % (lib)\n return info\n\n def set_info(self,**info):\n if info: \n lib_info = self.calc_libraries_info()\n dict_append(info,**lib_info)\n self.saved_results[self.__class__.__name__] = info\n\n def has_info(self):\n return self.saved_results.has_key(self.__class__.__name__)\n\n def get_info(self,notfound_action=0):\n \"\"\" Return a dictonary with items that are compatible\n with scipy_distutils.setup keyword arguments.\n \"\"\"\n flag = 0\n if not self.has_info():\n flag = 1\n if self.verbosity>0:\n print self.__class__.__name__ + ':'\n if hasattr(self, 'calc_info'):\n self.calc_info()\n if notfound_action:\n if not self.has_info():\n if notfound_action==1:\n warnings.warn(self.notfounderror.__doc__)\n elif notfound_action==2:\n raise self.notfounderror,self.notfounderror.__doc__\n else:\n raise ValueError,`notfound_action`\n\n if self.verbosity>0:\n if not self.has_info():\n print ' NOT AVAILABLE'\n self.set_info()\n else:\n print ' FOUND:'\n \n res = self.saved_results.get(self.__class__.__name__)\n if self.verbosity>0 and flag:\n for k,v in res.items():\n v = str(v)\n if k=='sources' and len(v)>200: v = v[:60]+' ...\\n... 
'+v[-60:]\n print ' %s = %s'%(k,v)\n print\n \n return res\n\n def get_paths(self, section, key):\n dirs = self.cp.get(section, key).split(os.pathsep)\n env_var = self.dir_env_var\n if env_var:\n if type(env_var) is type([]):\n e0 = env_var[-1]\n for e in env_var:\n if os.environ.has_key(e):\n e0 = e\n break\n if not env_var[0]==e0:\n print 'Setting %s=%s' % (env_var[0],e0)\n env_var = e0\n if env_var and os.environ.has_key(env_var):\n d = os.environ[env_var]\n if d=='None':\n print 'Disabled',self.__class__.__name__,'(%s is None)' \\\n % (self.dir_env_var)\n return []\n if os.path.isfile(d):\n dirs = [os.path.dirname(d)] + dirs\n l = getattr(self,'_lib_names',[])\n if len(l)==1:\n b = os.path.basename(d)\n b = os.path.splitext(b)[0]\n if b[:3]=='lib':\n print 'Replacing _lib_names[0]==%r with %r' \\\n % (self._lib_names[0], b[3:])\n self._lib_names[0] = b[3:]\n else:\n ds = d.split(os.pathsep)\n ds2 = []\n for d in ds:\n if os.path.isdir(d):\n ds2.append(d)\n for dd in ['include','lib']:\n d1 = os.path.join(d,dd)\n if os.path.isdir(d1):\n ds2.append(d1)\n dirs = ds2 + dirs\n default_dirs = self.cp.get('DEFAULT', key).split(os.pathsep)\n dirs.extend(default_dirs)\n ret = []\n [ret.append(d) for d in dirs if os.path.isdir(d) and d not in ret]\n if self.verbosity>1:\n print '(',key,'=',':'.join(ret),')'\n return ret\n\n def get_lib_dirs(self, key='library_dirs'):\n return self.get_paths(self.section, key)\n\n def get_include_dirs(self, key='include_dirs'):\n return self.get_paths(self.section, key)\n\n def get_src_dirs(self, key='src_dirs'):\n return self.get_paths(self.section, key)\n\n def get_libs(self, key, default):\n try:\n libs = self.cp.get(self.section, key)\n except ConfigParser.NoOptionError:\n if not default:\n return []\n if type(default) is type(''):\n return [default]\n return default\n return [b for b in [a.strip() for a in libs.split(',')] if b]\n\n def get_libraries(self, key='libraries'):\n return self.get_libs(key,'')\n\n def 
check_libs(self,lib_dir,libs,opt_libs =[]):\n \"\"\" If static or shared libraries are available then return\n their info dictionary. \"\"\"\n if self.search_static_first:\n exts = ['.a',so_ext]\n else:\n exts = [so_ext,'.a']\n if sys.platform=='cygwin':\n exts.append('.dll.a')\n for ext in exts:\n info = self._check_libs(lib_dir,libs,opt_libs,ext)\n if info is not None: return info\n return\n\n def _lib_list(self, lib_dir, libs, ext):\n assert type(lib_dir) is type('')\n liblist = []\n for l in libs:\n p = self.combine_paths(lib_dir, 'lib'+l+ext)\n if p:\n assert len(p)==1\n liblist.append(p[0])\n return liblist\n\n def _extract_lib_names(self,libs):\n return [os.path.splitext(os.path.basename(p))[0][3:] \\\n for p in libs]\n\n def _check_libs(self,lib_dir,libs, opt_libs, ext):\n found_libs = self._lib_list(lib_dir, libs, ext)\n if len(found_libs) == len(libs):\n found_libs = self._extract_lib_names(found_libs)\n info = {'libraries' : found_libs, 'library_dirs' : [lib_dir]}\n opt_found_libs = self._lib_list(lib_dir, opt_libs, ext)\n if len(opt_found_libs) == len(opt_libs):\n opt_found_libs = self._extract_lib_names(opt_found_libs)\n info['libraries'].extend(opt_found_libs)\n return info\n\n def combine_paths(self,*args):\n return combine_paths(*args,**{'verbosity':self.verbosity})\n\nclass fftw_info(system_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['rfftw', 'fftw']\n includes = ['fftw.h','rfftw.h']\n macros = [('SCIPY_FFTW_H',None)]\n notfounderror = FFTWNotFoundError\n\n def __init__(self):\n system_info.__init__(self)\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n incl_dirs = self.get_include_dirs()\n incl_dir = None\n libs = self.get_libs(self.section+'_libs', self.libs)\n info = None\n for d in lib_dirs:\n r = self.check_libs(d,libs)\n if r is not None:\n info = r\n break\n if info is not None:\n flag = 0\n for d in incl_dirs:\n if len(self.combine_paths(d,self.includes))==2:\n dict_append(info,include_dirs=[d])\n flag = 1\n 
incl_dirs = [d]\n incl_dir = d\n break\n if flag:\n dict_append(info,define_macros=self.macros)\n else:\n info = None\n if info is not None:\n self.set_info(**info)\n\nclass dfftw_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['drfftw','dfftw']\n includes = ['dfftw.h','drfftw.h']\n macros = [('SCIPY_DFFTW_H',None)]\n\nclass sfftw_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['srfftw','sfftw']\n includes = ['sfftw.h','srfftw.h']\n macros = [('SCIPY_SFFTW_H',None)]\n\nclass fftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['rfftw_threads','fftw_threads']\n includes = ['fftw_threads.h','rfftw_threads.h']\n macros = [('SCIPY_FFTW_THREADS_H',None)]\n\nclass dfftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['drfftw_threads','dfftw_threads']\n includes = ['dfftw_threads.h','drfftw_threads.h']\n macros = [('SCIPY_DFFTW_THREADS_H',None)]\n\nclass sfftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['srfftw_threads','sfftw_threads']\n includes = ['sfftw_threads.h','srfftw_threads.h']\n macros = [('SCIPY_SFFTW_THREADS_H',None)]\n\nclass djbfft_info(system_info):\n section = 'djbfft'\n dir_env_var = 'DJBFFT'\n notfounderror = DJBFFTNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend(self.combine_paths(d,['djbfft'])+[d])\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n incl_dirs = self.get_include_dirs()\n info = None\n for d in lib_dirs:\n p = self.combine_paths (d,['djbfft.a'])\n if p:\n info = {'extra_objects':p}\n break\n p = self.combine_paths (d,['libdjbfft.a'])\n if p:\n info = {'libraries':['djbfft'],'library_dirs':[d]}\n break\n if info is None:\n return\n for d in incl_dirs:\n if len(self.combine_paths(d,['fftc8.h','fftfreq.h']))==2:\n 
dict_append(info,include_dirs=[d],\n define_macros=[('SCIPY_DJBFFT_H',None)])\n self.set_info(**info)\n return\n return\n\nclass atlas_info(system_info):\n section = 'atlas'\n dir_env_var = 'ATLAS'\n _lib_names = ['f77blas','cblas']\n if sys.platform[:7]=='freebsd':\n _lib_atlas = ['atlas_r']\n _lib_lapack = ['alapack_r']\n else:\n _lib_atlas = ['atlas']\n _lib_lapack = ['lapack']\n\n notfounderror = AtlasNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend(self.combine_paths(d,['atlas*','ATLAS*',\n 'sse','3dnow','sse2'])+[d])\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n info = {}\n atlas_libs = self.get_libs('atlas_libs',\n self._lib_names + self._lib_atlas)\n lapack_libs = self.get_libs('lapack_libs',self._lib_lapack)\n atlas = None\n lapack = None\n atlas_1 = None\n for d in lib_dirs:\n atlas = self.check_libs(d,atlas_libs,[])\n lapack_atlas = self.check_libs(d,['lapack_atlas'],[])\n if atlas is not None:\n lib_dirs2 = self.combine_paths(d,['atlas*','ATLAS*'])+[d]\n for d2 in lib_dirs2:\n lapack = self.check_libs(d2,lapack_libs,[])\n if lapack is not None:\n break\n else:\n lapack = None\n if lapack is not None:\n break\n if atlas:\n atlas_1 = atlas\n print self.__class__\n if atlas is None:\n atlas = atlas_1\n if atlas is None:\n return\n include_dirs = self.get_include_dirs()\n h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]\n if h:\n h = os.path.dirname(h)\n dict_append(info,include_dirs=[h])\n info['language'] = 'c'\n if lapack is not None:\n dict_append(info,**lapack)\n dict_append(info,**atlas)\n elif 'lapack_atlas' in atlas['libraries']:\n dict_append(info,**atlas)\n dict_append(info,define_macros=[('ATLAS_WITH_LAPACK_ATLAS',None)])\n self.set_info(**info)\n return\n else:\n dict_append(info,**atlas)\n dict_append(info,define_macros=[('ATLAS_WITHOUT_LAPACK',None)])\n 
message = \"\"\"\n*********************************************************************\n Could not find lapack library within the ATLAS installation.\n*********************************************************************\n\"\"\"\n warnings.warn(message)\n self.set_info(**info)\n return\n \n # Check if lapack library is complete, only warn if it is not.\n lapack_dir = lapack['library_dirs'][0]\n lapack_name = lapack['libraries'][0]\n lapack_lib = None\n for e in ['.a',so_ext]:\n fn = os.path.join(lapack_dir,'lib'+lapack_name+e)\n if os.path.exists(fn):\n lapack_lib = fn\n break\n if lapack_lib is not None:\n sz = os.stat(lapack_lib)[6]\n if sz <= 4000*1024:\n message = \"\"\"\n*********************************************************************\n Lapack library (from ATLAS) is probably incomplete:\n size of %s is %sk (expected >4000k)\n\n Follow the instructions in the KNOWN PROBLEMS section of the file\n scipy/INSTALL.txt.\n*********************************************************************\n\"\"\" % (lapack_lib,sz/1024)\n warnings.warn(message)\n else:\n info['language'] = 'f77'\n\n self.set_info(**info)\n\nclass atlas_blas_info(atlas_info):\n _lib_names = ['f77blas','cblas']\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n info = {}\n atlas_libs = self.get_libs('atlas_libs',\n self._lib_names + self._lib_atlas)\n atlas = None\n for d in lib_dirs:\n atlas = self.check_libs(d,atlas_libs,[])\n if atlas is not None:\n break\n if atlas is None:\n return\n include_dirs = self.get_include_dirs()\n h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]\n if h:\n h = os.path.dirname(h)\n dict_append(info,include_dirs=[h])\n info['language'] = 'c'\n\n dict_append(info,**atlas)\n\n self.set_info(**info)\n return\n\n\nclass atlas_threads_info(atlas_info):\n dir_env_var = ['PTATLAS','ATLAS']\n _lib_names = ['ptf77blas','ptcblas']\n\nclass atlas_blas_threads_info(atlas_blas_info):\n dir_env_var = ['PTATLAS','ATLAS']\n _lib_names = 
['ptf77blas','ptcblas']\n\nclass lapack_atlas_info(atlas_info):\n _lib_names = ['lapack_atlas'] + atlas_info._lib_names\n\nclass lapack_atlas_threads_info(atlas_threads_info):\n _lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names\n\nclass lapack_info(system_info):\n section = 'lapack'\n dir_env_var = 'LAPACK'\n _lib_names = ['lapack']\n notfounderror = LapackNotFoundError\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n\n lapack_libs = self.get_libs('lapack_libs', self._lib_names)\n for d in lib_dirs:\n lapack = self.check_libs(d,lapack_libs,[])\n if lapack is not None:\n info = lapack \n break\n else:\n return\n info['language'] = 'f77'\n self.set_info(**info)\n\nclass lapack_src_info(system_info):\n section = 'lapack_src'\n dir_env_var = 'LAPACK_SRC'\n notfounderror = LapackSrcNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['LAPACK*/SRC','SRC']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'dgesv.f')):\n src_dir = d\n break\n if not src_dir:\n #XXX: Get sources from netlib. 
May be ask first.\n return\n # The following is extracted from LAPACK-3.0/SRC/Makefile\n allaux='''\n ilaenv ieeeck lsame lsamen xerbla\n ''' # *.f\n laux = '''\n bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1\n laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2\n lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre\n larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4\n lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1\n lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf\n stebz stedc steqr sterf\n ''' # [s|d]*.f\n lasrc = '''\n gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak\n gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv\n gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2\n geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd\n gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal\n gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd\n ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein\n hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0\n lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb\n lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp\n laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv\n lartv larz larzb larzt laswp lasyf latbs latdf latps latrd\n latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv\n pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2\n potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri\n pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs\n spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv\n sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2\n tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs\n trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs\n tzrqf tzrzf\n ''' # [s|c|d|z]*.f\n sd_lasrc = '''\n laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l\n org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr\n orm2l 
orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3\n ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx\n sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd\n stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd\n sygvx sytd2 sytrd\n ''' # [s|d]*.f\n cz_lasrc = '''\n bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev\n heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv\n hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd\n hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf\n hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7\n laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe\n laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv\n spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq\n ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2\n unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr\n ''' # [c|z]*.f\n #######\n sclaux = laux + ' econd ' # s*.f\n dzlaux = laux + ' secnd ' # d*.f\n slasrc = lasrc + sd_lasrc # s*.f\n dlasrc = lasrc + sd_lasrc # d*.f\n clasrc = lasrc + cz_lasrc + ' srot srscl ' # c*.f\n zlasrc = lasrc + cz_lasrc + ' drot drscl ' # z*.f\n oclasrc = ' icmax1 scsum1 ' # *.f\n ozlasrc = ' izmax1 dzsum1 ' # *.f\n sources = ['s%s.f'%f for f in (sclaux+slasrc).split()] \\\n + ['d%s.f'%f for f in (dzlaux+dlasrc).split()] \\\n + ['c%s.f'%f for f in (clasrc).split()] \\\n + ['z%s.f'%f for f in (zlasrc).split()] \\\n + ['%s.f'%f for f in (allaux+oclasrc+ozlasrc).split()]\n sources = [os.path.join(src_dir,f) for f in sources]\n #XXX: should we check here actual existence of source files?\n info = {'sources':sources,'language':'f77'}\n self.set_info(**info)\n\natlas_version_c_text = r'''\n/* This file is generated from scipy_distutils/system_info.py */\n#ifdef __CPLUSPLUS__\nextern \"C\" {\n#endif\n#include \"Python.h\"\nstatic PyMethodDef module_methods[] = { {NULL,NULL} };\nDL_EXPORT(void) initatlas_version(void) {\n void 
ATL_buildinfo(void);\n ATL_buildinfo();\n Py_InitModule(\"atlas_version\", module_methods);\n}\n#ifdef __CPLUSCPLUS__\n}\n#endif\n'''\n\ndef get_atlas_version(**config):\n from core import Extension, setup\n from misc_util import get_build_temp\n import log\n magic = hex(hash(`config`))\n def atlas_version_c(extension, build_dir,magic=magic):\n source = os.path.join(build_dir,'atlas_version_%s.c' % (magic))\n if os.path.isfile(source):\n from distutils.dep_util import newer\n if newer(source,__file__):\n return source\n f = open(source,'w')\n f.write(atlas_version_c_text)\n f.close()\n return source\n ext = Extension('atlas_version',\n sources=[atlas_version_c],\n **config)\n extra_args = ['--build-lib',get_build_temp()]\n for a in sys.argv:\n if re.match('[-][-]compiler[=]',a):\n extra_args.append(a)\n try:\n dist = setup(ext_modules=[ext],\n script_name = 'get_atlas_version',\n script_args = ['build_src','build_ext']+extra_args)\n except Exception,msg:\n print \"##### msg: %s\" % msg\n if not msg:\n msg = \"Unknown Exception\"\n log.warn(msg)\n return None\n\n from distutils.sysconfig import get_config_var\n so_ext = get_config_var('SO')\n build_ext = dist.get_command_obj('build_ext')\n target = os.path.join(build_ext.build_lib,'atlas_version'+so_ext)\n from exec_command import exec_command,get_pythonexe\n cmd = [get_pythonexe(),'-c',\n '\"import imp;imp.load_dynamic(\\\\\"atlas_version\\\\\",\\\\\"%s\\\\\")\"'\\\n % (os.path.basename(target))]\n s,o = exec_command(cmd,execute_in=os.path.dirname(target),use_tee=0)\n atlas_version = None\n if not s:\n m = re.search(r'ATLAS version (?P\\d+[.]\\d+[.]\\d+)',o)\n if m:\n atlas_version = m.group('version')\n if atlas_version is None:\n if re.search(r'undefined symbol: ATL_buildinfo',o,re.M):\n atlas_version = '3.2.1_pre3.3.6'\n else:\n print 'Command:',' '.join(cmd)\n print 'Status:',s\n print 'Output:',o\n return atlas_version\n\n\nclass lapack_opt_info(system_info):\n \n def calc_info(self):\n\n if 
sys.platform=='darwin' and not os.environ.get('ATLAS',None):\n args = []\n link_args = []\n if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):\n args.extend(['-faltivec','-framework','Accelerate'])\n link_args.extend(['-Wl,-framework','-Wl,Accelerate'])\n elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):\n args.extend(['-faltivec','-framework','vecLib'])\n link_args.extend(['-Wl,-framework','-Wl,vecLib'])\n if args:\n self.set_info(extra_compile_args=args,\n extra_link_args=link_args,\n define_macros=[('NO_ATLAS_INFO',3)])\n return\n\n atlas_info = get_info('atlas_threads')\n if not atlas_info:\n atlas_info = get_info('atlas')\n #atlas_info = {} ## uncomment for testing\n atlas_version = None\n need_lapack = 0\n need_blas = 0\n info = {}\n if atlas_info:\n version_info = atlas_info.copy()\n atlas_version = get_atlas_version(**version_info)\n if not atlas_info.has_key('define_macros'):\n atlas_info['define_macros'] = []\n if atlas_version is None:\n atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))\n else:\n atlas_info['define_macros'].append(('ATLAS_INFO',\n '\"\\\\\"%s\\\\\"\"' % atlas_version))\n\t\tif atlas_version=='3.2.1_pre3.3.6':\n\t\t atlas_info['define_macros'].append(('NO_ATLAS_INFO',4))\n l = atlas_info.get('define_macros',[])\n if ('ATLAS_WITH_LAPACK_ATLAS',None) in l \\\n or ('ATLAS_WITHOUT_LAPACK',None) in l:\n need_lapack = 1\n info = atlas_info\n else:\n warnings.warn(AtlasNotFoundError.__doc__)\n need_blas = 1\n need_lapack = 1\n dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])\n\n if need_lapack:\n lapack_info = get_info('lapack')\n #lapack_info = {} ## uncomment for testing\n if lapack_info:\n dict_append(info,**lapack_info)\n else:\n warnings.warn(LapackNotFoundError.__doc__)\n lapack_src_info = get_info('lapack_src')\n if not lapack_src_info:\n warnings.warn(LapackSrcNotFoundError.__doc__)\n return\n dict_append(info,libraries=[('flapack_src',lapack_src_info)])\n\n if need_blas:\n blas_info = 
get_info('blas')\n #blas_info = {} ## uncomment for testing\n if blas_info:\n dict_append(info,**blas_info)\n else:\n warnings.warn(BlasNotFoundError.__doc__)\n blas_src_info = get_info('blas_src')\n if not blas_src_info:\n warnings.warn(BlasSrcNotFoundError.__doc__)\n return\n dict_append(info,libraries=[('fblas_src',blas_src_info)])\n\n self.set_info(**info)\n return\n\n\nclass blas_opt_info(system_info):\n \n def calc_info(self):\n\n if sys.platform=='darwin' and not os.environ.get('ATLAS',None):\n args = []\n link_args = []\n if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):\n args.extend(['-faltivec','-framework','Accelerate'])\n link_args.extend(['-Wl,-framework','-Wl,Accelerate'])\n elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):\n args.extend(['-faltivec','-framework','vecLib'])\n link_args.extend(['-Wl,-framework','-Wl,vecLib'])\n if args:\n self.set_info(extra_compile_args=args,\n extra_link_args=link_args,\n define_macros=[('NO_ATLAS_INFO',3)])\n return\n\n atlas_info = get_info('atlas_blas_threads')\n if not atlas_info:\n atlas_info = get_info('atlas_blas')\n atlas_version = None\n need_blas = 0\n info = {}\n if atlas_info:\n version_info = atlas_info.copy()\n atlas_version = get_atlas_version(**version_info)\n if not atlas_info.has_key('define_macros'):\n atlas_info['define_macros'] = []\n if atlas_version is None:\n atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))\n else:\n atlas_info['define_macros'].append(('ATLAS_INFO',\n '\"\\\\\"%s\\\\\"\"' % atlas_version))\n info = atlas_info\n else:\n warnings.warn(AtlasNotFoundError.__doc__)\n need_blas = 1\n dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])\n\n if need_blas:\n blas_info = get_info('blas')\n if blas_info:\n dict_append(info,**blas_info)\n else:\n warnings.warn(BlasNotFoundError.__doc__)\n blas_src_info = get_info('blas_src')\n if not blas_src_info:\n warnings.warn(BlasSrcNotFoundError.__doc__)\n return\n 
dict_append(info,libraries=[('fblas_src',blas_src_info)])\n\n self.set_info(**info)\n return\n\n\nclass blas_info(system_info):\n section = 'blas'\n dir_env_var = 'BLAS'\n _lib_names = ['blas']\n notfounderror = BlasNotFoundError\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n\n blas_libs = self.get_libs('blas_libs', self._lib_names)\n for d in lib_dirs:\n blas = self.check_libs(d,blas_libs,[])\n if blas is not None:\n info = blas \n break\n else:\n return\n info['language'] = 'f77' # XXX: is it generally true?\n self.set_info(**info)\n\n\nclass blas_src_info(system_info):\n section = 'blas_src'\n dir_env_var = 'BLAS_SRC'\n notfounderror = BlasSrcNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['blas']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'daxpy.f')):\n src_dir = d\n break\n if not src_dir:\n #XXX: Get sources from netlib. 
May be ask first.\n return\n blas1 = '''\n caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot\n dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2\n srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg\n dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax\n snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap\n '''\n blas2 = '''\n cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv\n chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv\n dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv\n sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger\n stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc\n zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2\n ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv\n '''\n blas3 = '''\n cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k\n dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm\n ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm\n '''\n sources = [os.path.join(src_dir,f+'.f') \\\n for f in (blas1+blas2+blas3).split()]\n #XXX: should we check here actual existence of source files?\n info = {'sources':sources,'language':'f77'}\n self.set_info(**info)\n\nclass x11_info(system_info):\n section = 'x11'\n notfounderror = X11NotFoundError\n\n def __init__(self):\n system_info.__init__(self,\n default_lib_dirs=default_x11_lib_dirs,\n default_include_dirs=default_x11_include_dirs)\n\n def calc_info(self):\n if sys.platform in ['win32']:\n return\n lib_dirs = self.get_lib_dirs()\n include_dirs = self.get_include_dirs()\n x11_libs = self.get_libs('x11_libs', ['X11'])\n for lib_dir in lib_dirs:\n info = self.check_libs(lib_dir, x11_libs, [])\n if info is not None:\n break\n else:\n return\n inc_dir = None\n for d in include_dirs:\n if self.combine_paths(d, 'X11/X.h'):\n inc_dir = d\n break\n if inc_dir is not None:\n dict_append(info, include_dirs=[inc_dir])\n self.set_info(**info)\n\nclass numpy_info(system_info):\n 
section = 'numpy'\n modulename = 'Numeric'\n notfounderror = NumericNotFoundError\n\n def __init__(self):\n from distutils.sysconfig import get_python_inc\n include_dirs = []\n try:\n module = __import__(self.modulename)\n prefix = []\n for name in module.__file__.split(os.sep):\n if name=='lib':\n break\n prefix.append(name)\n include_dirs.append(get_python_inc(prefix=os.sep.join(prefix)))\n except ImportError:\n pass\n py_incl_dir = get_python_inc()\n include_dirs.append(py_incl_dir)\n for d in default_include_dirs:\n d = os.path.join(d, os.path.basename(py_incl_dir))\n if d not in include_dirs:\n include_dirs.append(d)\n system_info.__init__(self,\n default_lib_dirs=[],\n default_include_dirs=include_dirs)\n\n def calc_info(self):\n try:\n module = __import__(self.modulename)\n except ImportError:\n return\n info = {}\n macros = [(self.modulename.upper()+'_VERSION',\n '\"\\\\\"%s\\\\\"\"' % (module.__version__))]\n## try:\n## macros.append(\n## (self.modulename.upper()+'_VERSION_HEX',\n## hex(vstr2hex(module.__version__))),\n## )\n## except Exception,msg:\n## print msg\n dict_append(info, define_macros = macros)\n include_dirs = self.get_include_dirs()\n inc_dir = None\n for d in include_dirs:\n if self.combine_paths(d,\n os.path.join(self.modulename,\n 'arrayobject.h')):\n inc_dir = d\n break\n if inc_dir is not None:\n dict_append(info, include_dirs=[inc_dir])\n if info:\n self.set_info(**info)\n return\n\nclass numarray_info(numpy_info):\n section = 'numarray'\n modulename = 'numarray'\n\nclass boost_python_info(system_info):\n section = 'boost_python'\n dir_env_var = 'BOOST'\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['boost*']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n from distutils.sysconfig import get_python_inc\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if 
os.path.isfile(os.path.join(d,'libs','python','src','module.cpp')):\n src_dir = d\n break\n if not src_dir:\n return\n py_incl_dir = get_python_inc()\n srcs_dir = os.path.join(src_dir,'libs','python','src')\n bpl_srcs = glob(os.path.join(srcs_dir,'*.cpp'))\n bpl_srcs += glob(os.path.join(srcs_dir,'*','*.cpp'))\n info = {'libraries':[('boost_python_src',{'include_dirs':[src_dir,py_incl_dir],\n 'sources':bpl_srcs})],\n 'include_dirs':[src_dir],\n }\n if info:\n self.set_info(**info)\n return\n\nclass agg2_info(system_info):\n section = 'agg2'\n dir_env_var = 'AGG2'\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['agg2*']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'src','agg_affine_matrix.cpp')):\n src_dir = d\n break\n if not src_dir:\n return\n if sys.platform=='win32':\n agg2_srcs = glob(os.path.join(src_dir,'src','platform','win32','agg_win32_bmp.cpp'))\n else:\n agg2_srcs = glob(os.path.join(src_dir,'src','*.cpp'))\n agg2_srcs += [os.path.join(src_dir,'src','platform','X11','agg_platform_support.cpp')]\n \n info = {'libraries':[('agg2_src',{'sources':agg2_srcs,\n 'include_dirs':[os.path.join(src_dir,'include')],\n })],\n 'include_dirs':[os.path.join(src_dir,'include')],\n }\n if info:\n self.set_info(**info)\n return\n\nclass _pkg_config_info(system_info):\n section = None\n config_env_var = 'PKG_CONFIG'\n default_config_exe = 'pkg-config'\n append_config_exe = ''\n version_macro_name = None\n release_macro_name = None\n version_flag = '--modversion'\n cflags_flag = '--cflags'\n\n def get_config_exe(self):\n if os.environ.has_key(self.config_env_var):\n return os.environ[self.config_env_var]\n return self.default_config_exe\n def get_config_output(self, config_exe, option):\n s,o = exec_command(config_exe+' 
'+self.append_config_exe+' '+option,use_tee=0)\n if not s:\n return o\n\n def calc_info(self):\n config_exe = find_executable(self.get_config_exe())\n if not os.path.isfile(config_exe):\n print 'File not found: %s. Cannot determine %s info.' \\\n % (config_exe, self.section)\n return\n info = {}\n macros = []\n libraries = []\n library_dirs = []\n include_dirs = []\n extra_link_args = []\n extra_compile_args = []\n version = self.get_config_output(config_exe,self.version_flag)\n if version:\n macros.append((self.__class__.__name__.split('.')[-1].upper(),\n '\"\\\\\"%s\\\\\"\"' % (version)))\n if self.version_macro_name:\n macros.append((self.version_macro_name+'_%s' % (version.replace('.','_')),None))\n if self.release_macro_name:\n release = self.get_config_output(config_exe,'--release')\n if release:\n macros.append((self.release_macro_name+'_%s' % (release.replace('.','_')),None))\n opts = self.get_config_output(config_exe,'--libs')\n if opts:\n for opt in opts.split():\n if opt[:2]=='-l':\n libraries.append(opt[2:])\n elif opt[:2]=='-L':\n library_dirs.append(opt[2:])\n else:\n extra_link_args.append(opt)\n opts = self.get_config_output(config_exe,self.cflags_flag)\n if opts:\n for opt in opts.split():\n if opt[:2]=='-I':\n include_dirs.append(opt[2:])\n elif opt[:2]=='-D':\n if '=' in opt:\n n,v = opt[2:].split('=')\n macros.append((n,v))\n else:\n macros.append((opt[2:],None))\n else:\n extra_compile_args.append(opt)\n if macros: dict_append(info, define_macros = macros)\n if libraries: dict_append(info, libraries = libraries)\n if library_dirs: dict_append(info, library_dirs = library_dirs)\n if include_dirs: dict_append(info, include_dirs = include_dirs)\n if extra_link_args: dict_append(info, extra_link_args = extra_link_args)\n if extra_compile_args: dict_append(info, extra_compile_args = extra_compile_args)\n if info:\n self.set_info(**info)\n return\n\nclass wx_info(_pkg_config_info):\n section = 'wx'\n config_env_var = 'WX_CONFIG'\n default_config_exe 
= 'wx-config'\n append_config_exe = ''\n version_macro_name = 'WX_VERSION'\n release_macro_name = 'WX_RELEASE'\n version_flag = '--version'\n cflags_flag = '--cxxflags'\n\nclass gdk_pixbuf_xlib_2_info(_pkg_config_info):\n section = 'gdk_pixbuf_xlib_2'\n append_config_exe = 'gdk-pixbuf-xlib-2.0'\n version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'\n\nclass gdk_pixbuf_2_info(_pkg_config_info):\n section = 'gdk_pixbuf_2'\n append_config_exe = 'gdk-pixbuf-2.0'\n version_macro_name = 'GDK_PIXBUF_VERSION'\n\nclass gdk_x11_2_info(_pkg_config_info):\n section = 'gdk_x11_2'\n append_config_exe = 'gdk-x11-2.0'\n version_macro_name = 'GDK_X11_VERSION'\n\nclass gdk_2_info(_pkg_config_info):\n section = 'gdk_2'\n append_config_exe = 'gdk-2.0'\n version_macro_name = 'GDK_VERSION'\n\nclass gdk_info(_pkg_config_info):\n section = 'gdk'\n append_config_exe = 'gdk'\n version_macro_name = 'GDK_VERSION'\n\nclass gtkp_x11_2_info(_pkg_config_info):\n section = 'gtkp_x11_2'\n append_config_exe = 'gtk+-x11-2.0'\n version_macro_name = 'GTK_X11_VERSION'\n\n\nclass gtkp_2_info(_pkg_config_info):\n section = 'gtkp_2'\n append_config_exe = 'gtk+-2.0'\n version_macro_name = 'GTK_VERSION'\n\nclass xft_info(_pkg_config_info):\n section = 'xft'\n append_config_exe = 'xft'\n version_macro_name = 'XFT_VERSION'\n\nclass freetype2_info(_pkg_config_info):\n section = 'freetype2'\n append_config_exe = 'freetype2'\n version_macro_name = 'FREETYPE2_VERSION'\n\n## def vstr2hex(version):\n## bits = []\n## n = [24,16,8,4,0]\n## r = 0\n## for s in version.split('.'):\n## r |= int(s) << n[0]\n## del n[0]\n## return r\n\n#--------------------------------------------------------------------\n\ndef combine_paths(*args,**kws):\n \"\"\" Return a list of existing paths composed by all combinations of\n items from arguments.\n \"\"\"\n r = []\n for a in args:\n if not a: continue\n if type(a) is types.StringType:\n a = [a]\n r.append(a)\n args = r\n if not args: return []\n if len(args)==1:\n result = reduce(lambda 
a,b:a+b,map(glob,args[0]),[])\n elif len (args)==2:\n result = []\n for a0 in args[0]:\n for a1 in args[1]:\n result.extend(glob(os.path.join(a0,a1)))\n else:\n result = combine_paths(*(combine_paths(args[0],args[1])+args[2:]))\n verbosity = kws.get('verbosity',1)\n if verbosity>1 and result:\n print '(','paths:',','.join(result),')'\n return result\n\nlanguage_map = {'c':0,'c++':1,'f77':2,'f90':3}\ninv_language_map = {0:'c',1:'c++',2:'f77',3:'f90'}\ndef dict_append(d,**kws):\n languages = []\n for k,v in kws.items():\n if k=='language':\n languages.append(v)\n continue\n if d.has_key(k):\n if k in ['library_dirs','include_dirs','define_macros']:\n [d[k].append(vv) for vv in v if vv not in d[k]]\n else:\n d[k].extend(v)\n else:\n d[k] = v\n if languages:\n l = inv_language_map[max([language_map.get(l,0) for l in languages])]\n d['language'] = l\n return\n\ndef show_all():\n import system_info\n import pprint\n match_info = re.compile(r'.*?_info').match\n show_only = []\n for n in sys.argv[1:]:\n if n[-5:] != '_info':\n n = n + '_info'\n show_only.append(n)\n show_all = not show_only\n for n in filter(match_info,dir(system_info)):\n if n in ['system_info','get_info']: continue\n if not show_all:\n if n not in show_only: continue\n del show_only[show_only.index(n)]\n c = getattr(system_info,n)()\n c.verbosity = 2\n r = c.get_info()\n if show_only:\n print 'Info classes not defined:',','.join(show_only)\nif __name__ == \"__main__\":\n show_all()\n", "methods": [ { "name": "get_info", "long_name": "get_info( name , notfound_action = 0 )", "filename": "system_info.py", "nloc": 43, "complexity": 1, "token_count": 194, "parameters": [ "name", "notfound_action" ], "start_line": 143, "end_line": 191, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 49, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , default_lib_dirs = default_lib_dirs , default_include_dirs = default_include_dirs , verbosity = 1 , )", "filename": "system_info.py", 
"nloc": 26, "complexity": 3, "token_count": 210, "parameters": [ "self", "default_lib_dirs", "default_include_dirs", "verbosity" ], "start_line": 271, "end_line": 296, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "calc_libraries_info", "long_name": "calc_libraries_info( self )", "filename": "system_info.py", "nloc": 15, "complexity": 5, "token_count": 78, "parameters": [ "self" ], "start_line": 298, "end_line": 312, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 15, "top_nesting_level": 1 }, { "name": "set_info", "long_name": "set_info( self , ** info )", "filename": "system_info.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "self", "info" ], "start_line": 314, "end_line": 318, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "has_info", "long_name": "has_info( self )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 18, "parameters": [ "self" ], "start_line": 320, "end_line": 321, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_info", "long_name": "get_info( self , notfound_action = 0 )", "filename": "system_info.py", "nloc": 30, "complexity": 15, "token_count": 206, "parameters": [ "self", "notfound_action" ], "start_line": 323, "end_line": 358, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 47, "complexity": 20, "token_count": 405, "parameters": [ "self", "section", "key" ], "start_line": 360, "end_line": 406, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 47, "top_nesting_level": 1 }, { "name": "get_lib_dirs", "long_name": "get_lib_dirs( self , key = 'library_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], 
"start_line": 408, "end_line": 409, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_include_dirs", "long_name": "get_include_dirs( self , key = 'include_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 411, "end_line": 412, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_src_dirs", "long_name": "get_src_dirs( self , key = 'src_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 414, "end_line": 415, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_libs", "long_name": "get_libs( self , key , default )", "filename": "system_info.py", "nloc": 10, "complexity": 7, "token_count": 79, "parameters": [ "self", "key", "default" ], "start_line": 417, "end_line": 426, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self , key = 'libraries' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 18, "parameters": [ "self", "key" ], "start_line": 428, "end_line": 429, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "check_libs", "long_name": "check_libs( self , lib_dir , libs , opt_libs = [ ] )", "filename": "system_info.py", "nloc": 11, "complexity": 5, "token_count": 77, "parameters": [ "self", "lib_dir", "libs", "opt_libs" ], "start_line": 431, "end_line": 443, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "_lib_list", "long_name": "_lib_list( self , lib_dir , libs , ext )", "filename": "system_info.py", "nloc": 9, "complexity": 3, "token_count": 65, "parameters": [ "self", "lib_dir", "libs", "ext" ], "start_line": 445, "end_line": 
453, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "_extract_lib_names", "long_name": "_extract_lib_names( self , libs )", "filename": "system_info.py", "nloc": 3, "complexity": 2, "token_count": 37, "parameters": [ "self", "libs" ], "start_line": 455, "end_line": 457, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "_check_libs", "long_name": "_check_libs( self , lib_dir , libs , opt_libs , ext )", "filename": "system_info.py", "nloc": 10, "complexity": 3, "token_count": 99, "parameters": [ "self", "lib_dir", "libs", "opt_libs", "ext" ], "start_line": 459, "end_line": 468, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "combine_paths", "long_name": "combine_paths( self , * args )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "self", "args" ], "start_line": 470, "end_line": 471, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 11, "parameters": [ "self" ], "start_line": 481, "end_line": 482, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 26, "complexity": 8, "token_count": 150, "parameters": [ "self" ], "start_line": 484, "end_line": 509, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 551, "end_line": 556, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", 
"long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 7, "token_count": 140, "parameters": [ "self" ], "start_line": 558, "end_line": 579, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 7, "complexity": 4, "token_count": 74, "parameters": [ "self", "section", "key" ], "start_line": 594, "end_line": 600, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 78, "complexity": 17, "token_count": 441, "parameters": [ "self" ], "start_line": 602, "end_line": 682, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 81, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 21, "complexity": 6, "token_count": 138, "parameters": [ "self" ], "start_line": 687, "end_line": 709, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 12, "complexity": 3, "token_count": 68, "parameters": [ "self" ], "start_line": 732, "end_line": 744, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 68, "parameters": [ "self", "section", "key" ], "start_line": 751, "end_line": 756, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 81, "complexity": 10, "token_count": 232, "parameters": [ "self" ], "start_line": 758, "end_line": 842, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 85, "top_nesting_level": 1 }, { "name": "get_atlas_version.atlas_version_c", "long_name": "get_atlas_version.atlas_version_c( extension , build_dir , magic = magic )", "filename": "system_info.py", "nloc": 10, "complexity": 3, "token_count": 74, "parameters": [ "extension", "build_dir", "magic" ], "start_line": 866, "end_line": 875, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_atlas_version", "long_name": "get_atlas_version( ** config )", "filename": "system_info.py", "nloc": 45, "complexity": 9, "token_count": 289, "parameters": [ "config" ], "start_line": 861, "end_line": 915, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 55, "top_nesting_level": 0 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 68, "complexity": 19, "token_count": 434, "parameters": [ "self" ], "start_line": 920, "end_line": 995, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 76, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 49, "complexity": 13, "token_count": 316, "parameters": [ "self" ], "start_line": 1000, "end_line": 1052, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 53, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 12, "complexity": 3, "token_count": 68, "parameters": [ "self" ], "start_line": 1061, "end_line": 1073, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1081, "end_line": 1086, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( 
self )", "filename": "system_info.py", "nloc": 34, "complexity": 5, "token_count": 106, "parameters": [ "self" ], "start_line": 1088, "end_line": 1123, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 4, "complexity": 1, "token_count": 19, "parameters": [ "self" ], "start_line": 1129, "end_line": 1132, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 20, "complexity": 7, "token_count": 114, "parameters": [ "self" ], "start_line": 1134, "end_line": 1153, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 139, "parameters": [ "self" ], "start_line": 1160, "end_line": 1181, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 121, "parameters": [ "self" ], "start_line": 1183, "end_line": 1211, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1221, "end_line": 1226, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 21, "complexity": 5, "token_count": 156, "parameters": [ "self" ], "start_line": 1228, "end_line": 1248, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 1 }, 
{ "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1254, "end_line": 1259, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 177, "parameters": [ "self" ], "start_line": 1261, "end_line": 1283, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "get_config_exe", "long_name": "get_config_exe( self )", "filename": "system_info.py", "nloc": 4, "complexity": 2, "token_count": 30, "parameters": [ "self" ], "start_line": 1295, "end_line": 1298, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "get_config_output", "long_name": "get_config_output( self , config_exe , option )", "filename": "system_info.py", "nloc": 4, "complexity": 2, "token_count": 37, "parameters": [ "self", "config_exe", "option" ], "start_line": 1299, "end_line": 1302, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 54, "complexity": 22, "token_count": 435, "parameters": [ "self" ], "start_line": 1304, "end_line": 1357, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 54, "top_nesting_level": 1 }, { "name": "combine_paths", "long_name": "combine_paths( * args , ** kws )", "filename": "system_info.py", "nloc": 22, "complexity": 11, "token_count": 195, "parameters": [ "args", "kws" ], "start_line": 1426, "end_line": 1450, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 0 }, { "name": "dict_append", "long_name": "dict_append( d , ** kws )", "filename": "system_info.py", "nloc": 17, "complexity": 9, "token_count": 
128, "parameters": [ "d", "kws" ], "start_line": 1454, "end_line": 1470, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "show_all", "long_name": "show_all( )", "filename": "system_info.py", "nloc": 20, "complexity": 8, "token_count": 137, "parameters": [], "start_line": 1472, "end_line": 1491, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 } ], "methods_before": [ { "name": "get_info", "long_name": "get_info( name , notfound_action = 0 )", "filename": "system_info.py", "nloc": 43, "complexity": 1, "token_count": 194, "parameters": [ "name", "notfound_action" ], "start_line": 143, "end_line": 191, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 49, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , default_lib_dirs = default_lib_dirs , default_include_dirs = default_include_dirs , verbosity = 1 , )", "filename": "system_info.py", "nloc": 26, "complexity": 3, "token_count": 206, "parameters": [ "self", "default_lib_dirs", "default_include_dirs", "verbosity" ], "start_line": 271, "end_line": 296, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "calc_libraries_info", "long_name": "calc_libraries_info( self )", "filename": "system_info.py", "nloc": 15, "complexity": 5, "token_count": 78, "parameters": [ "self" ], "start_line": 298, "end_line": 312, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 15, "top_nesting_level": 1 }, { "name": "set_info", "long_name": "set_info( self , ** info )", "filename": "system_info.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "self", "info" ], "start_line": 314, "end_line": 318, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "has_info", "long_name": "has_info( self )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 18, "parameters": [ "self" ], 
"start_line": 320, "end_line": 321, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_info", "long_name": "get_info( self , notfound_action = 0 )", "filename": "system_info.py", "nloc": 30, "complexity": 15, "token_count": 206, "parameters": [ "self", "notfound_action" ], "start_line": 323, "end_line": 358, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 47, "complexity": 20, "token_count": 405, "parameters": [ "self", "section", "key" ], "start_line": 360, "end_line": 406, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 47, "top_nesting_level": 1 }, { "name": "get_lib_dirs", "long_name": "get_lib_dirs( self , key = 'library_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 408, "end_line": 409, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_include_dirs", "long_name": "get_include_dirs( self , key = 'include_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 411, "end_line": 412, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_src_dirs", "long_name": "get_src_dirs( self , key = 'src_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 414, "end_line": 415, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_libs", "long_name": "get_libs( self , key , default )", "filename": "system_info.py", "nloc": 10, "complexity": 7, "token_count": 79, "parameters": [ "self", "key", "default" ], "start_line": 417, "end_line": 426, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self , key = 'libraries' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 18, "parameters": [ "self", "key" ], "start_line": 428, "end_line": 429, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "check_libs", "long_name": "check_libs( self , lib_dir , libs , opt_libs = [ ] )", "filename": "system_info.py", "nloc": 11, "complexity": 5, "token_count": 77, "parameters": [ "self", "lib_dir", "libs", "opt_libs" ], "start_line": 431, "end_line": 443, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "_lib_list", "long_name": "_lib_list( self , lib_dir , libs , ext )", "filename": "system_info.py", "nloc": 9, "complexity": 3, "token_count": 65, "parameters": [ "self", "lib_dir", "libs", "ext" ], "start_line": 445, "end_line": 453, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "_extract_lib_names", "long_name": "_extract_lib_names( self , libs )", "filename": "system_info.py", "nloc": 3, "complexity": 2, "token_count": 37, "parameters": [ "self", "libs" ], "start_line": 455, "end_line": 457, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "_check_libs", "long_name": "_check_libs( self , lib_dir , libs , opt_libs , ext )", "filename": "system_info.py", "nloc": 10, "complexity": 3, "token_count": 99, "parameters": [ "self", "lib_dir", "libs", "opt_libs", "ext" ], "start_line": 459, "end_line": 468, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "combine_paths", "long_name": "combine_paths( self , * args )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "self", "args" ], "start_line": 470, "end_line": 471, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 11, "parameters": [ "self" ], "start_line": 481, "end_line": 482, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 26, "complexity": 8, "token_count": 150, "parameters": [ "self" ], "start_line": 484, "end_line": 509, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 551, "end_line": 556, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 7, "token_count": 140, "parameters": [ "self" ], "start_line": 558, "end_line": 579, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 7, "complexity": 4, "token_count": 74, "parameters": [ "self", "section", "key" ], "start_line": 594, "end_line": 600, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 78, "complexity": 17, "token_count": 441, "parameters": [ "self" ], "start_line": 602, "end_line": 682, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 81, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 21, "complexity": 6, "token_count": 138, "parameters": [ "self" 
], "start_line": 687, "end_line": 709, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 12, "complexity": 3, "token_count": 68, "parameters": [ "self" ], "start_line": 732, "end_line": 744, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 68, "parameters": [ "self", "section", "key" ], "start_line": 751, "end_line": 756, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 81, "complexity": 10, "token_count": 232, "parameters": [ "self" ], "start_line": 758, "end_line": 842, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 85, "top_nesting_level": 1 }, { "name": "get_atlas_version.atlas_version_c", "long_name": "get_atlas_version.atlas_version_c( extension , build_dir , magic = magic )", "filename": "system_info.py", "nloc": 10, "complexity": 3, "token_count": 74, "parameters": [ "extension", "build_dir", "magic" ], "start_line": 866, "end_line": 875, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_atlas_version", "long_name": "get_atlas_version( ** config )", "filename": "system_info.py", "nloc": 45, "complexity": 9, "token_count": 289, "parameters": [ "config" ], "start_line": 861, "end_line": 915, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 55, "top_nesting_level": 0 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 68, "complexity": 19, "token_count": 434, "parameters": [ "self" ], "start_line": 920, "end_line": 995, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 76, 
"top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 49, "complexity": 13, "token_count": 316, "parameters": [ "self" ], "start_line": 1000, "end_line": 1052, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 53, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 12, "complexity": 3, "token_count": 68, "parameters": [ "self" ], "start_line": 1061, "end_line": 1073, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1081, "end_line": 1086, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 34, "complexity": 5, "token_count": 106, "parameters": [ "self" ], "start_line": 1088, "end_line": 1123, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 4, "complexity": 1, "token_count": 19, "parameters": [ "self" ], "start_line": 1129, "end_line": 1132, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 20, "complexity": 7, "token_count": 114, "parameters": [ "self" ], "start_line": 1134, "end_line": 1153, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 139, "parameters": [ "self" ], "start_line": 1160, "end_line": 1181, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 121, "parameters": [ "self" ], "start_line": 1183, "end_line": 1211, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1221, "end_line": 1226, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 21, "complexity": 5, "token_count": 156, "parameters": [ "self" ], "start_line": 1228, "end_line": 1248, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1254, "end_line": 1259, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 177, "parameters": [ "self" ], "start_line": 1261, "end_line": 1283, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "get_config_exe", "long_name": "get_config_exe( self )", "filename": "system_info.py", "nloc": 4, "complexity": 2, "token_count": 30, "parameters": [ "self" ], "start_line": 1295, "end_line": 1298, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "get_config_output", "long_name": "get_config_output( self , config_exe , option )", "filename": 
"system_info.py", "nloc": 4, "complexity": 2, "token_count": 37, "parameters": [ "self", "config_exe", "option" ], "start_line": 1299, "end_line": 1302, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 54, "complexity": 22, "token_count": 435, "parameters": [ "self" ], "start_line": 1304, "end_line": 1357, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 54, "top_nesting_level": 1 }, { "name": "combine_paths", "long_name": "combine_paths( * args , ** kws )", "filename": "system_info.py", "nloc": 22, "complexity": 11, "token_count": 195, "parameters": [ "args", "kws" ], "start_line": 1426, "end_line": 1450, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 0 }, { "name": "dict_append", "long_name": "dict_append( d , ** kws )", "filename": "system_info.py", "nloc": 17, "complexity": 9, "token_count": 128, "parameters": [ "d", "kws" ], "start_line": 1454, "end_line": 1470, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "show_all", "long_name": "show_all( )", "filename": "system_info.py", "nloc": 20, "complexity": 8, "token_count": 137, "parameters": [], "start_line": 1472, "end_line": 1491, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "__init__", "long_name": "__init__( self , default_lib_dirs = default_lib_dirs , default_include_dirs = default_include_dirs , verbosity = 1 , )", "filename": "system_info.py", "nloc": 26, "complexity": 3, "token_count": 210, "parameters": [ "self", "default_lib_dirs", "default_include_dirs", "verbosity" ], "start_line": 271, "end_line": 296, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 } ], "nloc": 1320, "complexity": 285, "token_count": 7306, "diff_parsed": { "added": [ " f = __file__", " 
except NameError,msg:", " f = sys.argv[0]", " cf = os.path.join(os.path.split(os.path.abspath(f))[0]," ], "deleted": [ " __file__", " except NameError:", " __file__ = sys.argv[0]", " cf = os.path.join(os.path.split(os.path.abspath(__file__))[0]," ] } } ] }, { "hash": "ebf0bc4e3e0fd00d0fd0c86faf2744004d4c4346", "msg": "Added g95 compiler support (thanks to Bill McLean)", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-22T07:04:22+00:00", "author_timezone": 0, "committer_date": "2005-03-22T07:04:22+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "d851d5a9e8d6689ee28e8be873b30d631cff7039" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 5, "insertions": 48, "lines": 53, "files": 2, "dmm_unit_size": 1.0, "dmm_unit_complexity": 1.0, "dmm_unit_interfacing": 1.0, "modified_files": [ { "old_path": "scipy_distutils/fcompiler.py", "new_path": "scipy_distutils/fcompiler.py", "filename": "fcompiler.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -641,6 +641,8 @@ def __get_flags(self, command, envvar=None, confvar=None):\n \n fcompiler_class = {'gnu':('gnufcompiler','GnuFCompiler',\n \"GNU Fortran Compiler\"),\n+ 'g95':('g95fcompiler','G95FCompiler',\n+ \"GNU Fortran 95 Compiler\"),\n 'pg':('pgfcompiler','PGroupFCompiler',\n \"Portland Group Fortran Compiler\"),\n 'absoft':('absoftfcompiler','AbsoftFCompiler',\n@@ -677,12 +679,12 @@ def __get_flags(self, command, envvar=None, confvar=None):\n \n _default_compilers = (\n # Platform mappings\n- ('win32',('gnu','intelv','absoft','compaqv','intelev')),\n- ('cygwin.*',('gnu','intelv','absoft','compaqv','intelev')),\n+ ('win32',('gnu','intelv','absoft','compaqv','intelev','g95')),\n+ ('cygwin.*',('gnu','intelv','absoft','compaqv','intelev','g95')),\n 
('linux.*',('gnu','intel','lahey','pg','absoft','nag','vast','compaq',\n- 'intele')),\n- ('darwin.*',('nag','absoft','ibm','gnu')),\n- ('sunos.*',('forte','gnu','sun')),\n+ 'intele','g95')),\n+ ('darwin.*',('nag','absoft','ibm','gnu','g95')),\n+ ('sunos.*',('forte','gnu','sun','g95')),\n ('irix.*',('mips','gnu')),\n ('aix.*',('ibm','gnu')),\n # OS mappings\n", "added_lines": 7, "deleted_lines": 5, "source_code": "\"\"\"scipy_distutils.fcompiler\n\nContains FCompiler, an abstract base class that defines the interface\nfor the Scipy_distutils Fortran compiler abstraction model.\n\n\"\"\"\n\nimport re\nimport os\nimport sys\nimport atexit\nfrom types import StringType, NoneType, ListType, TupleType\nfrom glob import glob\n\nfrom distutils.version import StrictVersion\nfrom scipy_distutils.ccompiler import CCompiler, gen_lib_options\n# distutils.ccompiler provides the following functions:\n# gen_preprocess_options(macros, include_dirs)\n# gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries)\nfrom distutils.errors import DistutilsModuleError,DistutilsArgError,\\\n DistutilsExecError,CompileError,LinkError,DistutilsPlatformError\nfrom distutils.core import Command\nfrom distutils.util import split_quoted\nfrom distutils.fancy_getopt import FancyGetopt\nfrom distutils.sysconfig import get_config_var\nfrom distutils.spawn import _nt_quote_args \n\n\nfrom scipy_distutils.command.config_compiler import config_fc\n\nimport log\nfrom misc_util import compiler_to_string, cyg2win32\nfrom exec_command import find_executable, exec_command\n\nclass FCompiler(CCompiler):\n \"\"\" Abstract base class to define the interface that must be implemented\n by real Fortran compiler classes.\n\n Methods that subclasses may redefine:\n\n get_version_cmd(), get_linker_so(), get_version()\n get_flags(), get_flags_opt(), get_flags_arch(), get_flags_debug()\n get_flags_f77(), get_flags_opt_f77(), get_flags_arch_f77(),\n get_flags_debug_f77(), get_flags_f90(), 
get_flags_opt_f90(),\n get_flags_arch_f90(), get_flags_debug_f90(),\n get_flags_fix(), get_flags_linker_so(), get_flags_version()\n\n DON'T call these methods (except get_version) after\n constructing a compiler instance or inside any other method.\n All methods, except get_version_cmd() and get_flags_version(), may\n call get_version() method.\n\n After constructing a compiler instance, always call customize(dist=None)\n method that finalizes compiler construction and makes the following\n attributes available:\n compiler_f77\n compiler_f90\n compiler_fix\n linker_so\n archiver\n ranlib\n libraries\n library_dirs\n \"\"\"\n # CCompiler defines the following attributes:\n # compiler_type\n # src_extensions\n # obj_extension\n # static_lib_extension\n # shared_lib_extension\n # static_lib_format\n # shared_lib_format\n # exe_extension\n # language_map ### REDEFINED\n # language_order ### REDEFINED\n # and the following public methods:\n # set_executables(**args)\n # set_executable(key,value)\n # define_macro(name, value=None)\n # undefine_macro(name)\n # add_include_dir(dir)\n # set_include_dirs(dirs)\n # add_library(libname)\n # set_libraries(libnames)\n # add_library_dir(dir)\n # set_library_dirs(dirs)\n # add_runtime_library_dir(dir)\n # set_runtime_library_dirs(dirs)\n # add_link_object(object)\n # set_link_objects(objects)\n #\n # detect_language(sources) ### USABLE\n #\n # preprocess(source,output_file=None,macros=None,include_dirs=None,\n # extra_preargs=None,extra_postargs=None)\n # compile(sources, output_dir=None, macros=None,\n # include_dirs=None, debug=0, extra_preargs=None,\n # extra_postargs=None, depends=None)\n # create_static_lib(objects,output_libname,output_dir=None,debug=0,target_lang=None):\n # link(target_desc, objects, output_filename, output_dir=None,\n # libraries=None, library_dirs=None, runtime_library_dirs=None,\n # export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None,\n # build_temp=None, target_lang=None)\n # 
link_shared_lib(objects, output_libname, output_dir=None,\n # libraries=None, library_dirs=None, runtime_library_dirs=None,\n # export_symbols=None, debug=0, extra_preargs=None,\n # extra_postargs=None, build_temp=None, target_lang=None)\n # link_shared_object(objects,output_filename,output_dir=None,\n # libraries=None,library_dirs=None,runtime_library_dirs=None,\n # export_symbols=None,debug=0,extra_preargs=None,\n # extra_postargs=None,build_temp=None,target_lang=None)\n # link_executable(objects,output_progname,output_dir=None,\n # libraries=None,library_dirs=None,runtime_library_dirs=None,\n # debug=0,extra_preargs=None,extra_postargs=None,target_lang=None)\n #\n # library_dir_option(dir)\n # runtime_library_dir_option(dir)\n # library_option(lib)\n # has_function(funcname,includes=None,include_dirs=None,\n # libraries=None,library_dirs=None)\n # find_library_file(dirs, lib, debug=0)\n #\n # object_filenames(source_filenames, strip_dir=0, output_dir='')\n # shared_object_filename(basename, strip_dir=0, output_dir='')\n # executable_filenamee(basename, strip_dir=0, output_dir='')\n # library_filename(libname, lib_type='static',strip_dir=0, output_dir=''):\n #\n # announce(msg, level=1)\n # debug_print(msg)\n # warn(msg)\n # execute(func, args, msg=None, level=1)\n # spawn(cmd)\n # move_file(src,dst)\n # mkpath(name, mode=0777)\n #\n\n language_map = {'.f':'f77',\n '.for':'f77',\n '.F':'f77', # XXX: needs preprocessor\n '.ftn':'f77',\n '.f77':'f77',\n '.f90':'f90',\n '.F90':'f90', # XXX: needs preprocessor\n '.f95':'f90',\n }\n language_order = ['f90','f77']\n\n version_pattern = None\n\n executables = {\n 'version_cmd' : [\"f77\",\"-v\"],\n 'compiler_f77' : [\"f77\"],\n 'compiler_f90' : [\"f90\"],\n 'compiler_fix' : [\"f90\",\"-fixed\"],\n 'linker_so' : [\"f90\",\"-shared\"],\n #'linker_exe' : [\"f90\"], # XXX do we need it??\n 'archiver' : [\"ar\",\"-cr\"],\n 'ranlib' : None,\n }\n\n compile_switch = \"-c\"\n object_switch = \"-o \" # Ending space matters! 
It will be stripped\n # but if it is missing then object_switch\n # will be prefixed to object file name by\n # string concatenation.\n library_switch = \"-o \" # Ditto!\n\n # Switch to specify where module files are created and searched\n # for USE statement. Normally it is a string and also here ending\n # space matters. See above.\n module_dir_switch = None\n\n # Switch to specify where module files are searched for USE statement.\n module_include_switch = '-I' \n\n pic_flags = [] # Flags to create position-independent code\n\n src_extensions = ['.for','.ftn','.f77','.f','.f90','.f95','.F','.F90']\n obj_extension = \".o\"\n shared_lib_extension = get_config_var('SO') # or .dll\n static_lib_extension = \".a\" # or .lib\n static_lib_format = \"lib%s%s\" # or %s%s\n shared_lib_format = \"%s%s\"\n exe_extension = \"\"\n\n ######################################################################\n ## Methods that subclasses may redefine. But don't call these methods!\n ## They are private to FCompiler class and may return unexpected\n ## results if used elsewhere. So, you have been warned..\n\n def get_version_cmd(self):\n \"\"\" Compiler command to print out version information. \"\"\"\n f77 = self.executables['compiler_f77']\n if f77 is not None:\n f77 = f77[0]\n cmd = self.executables['version_cmd']\n if cmd is not None:\n cmd = cmd[0]\n if cmd==f77:\n cmd = self.compiler_f77[0]\n else:\n f90 = self.executables['compiler_f90']\n if f90 is not None:\n f90 = f90[0]\n if cmd==f90:\n cmd = self.compiler_f90[0]\n return cmd\n\n def get_linker_so(self):\n \"\"\" Linker command to build shared libraries. 
\"\"\"\n f77 = self.executables['compiler_f77']\n if f77 is not None:\n f77 = f77[0]\n ln = self.executables['linker_so']\n if ln is not None:\n ln = ln[0]\n if ln==f77:\n ln = self.compiler_f77[0]\n else:\n f90 = self.executables['compiler_f90']\n if f90 is not None:\n f90 = f90[0]\n if ln==f90:\n ln = self.compiler_f90[0]\n return ln\n\n def get_flags(self):\n \"\"\" List of flags common to all compiler types. \"\"\"\n return [] + self.pic_flags\n def get_flags_version(self):\n \"\"\" List of compiler flags to print out version information. \"\"\"\n if self.executables['version_cmd']:\n return self.executables['version_cmd'][1:]\n return []\n def get_flags_f77(self):\n \"\"\" List of Fortran 77 specific flags. \"\"\"\n if self.executables['compiler_f77']:\n return self.executables['compiler_f77'][1:]\n return []\n def get_flags_f90(self):\n \"\"\" List of Fortran 90 specific flags. \"\"\"\n if self.executables['compiler_f90']:\n return self.executables['compiler_f90'][1:]\n return []\n def get_flags_free(self):\n \"\"\" List of Fortran 90 free format specific flags. \"\"\"\n return []\n def get_flags_fix(self):\n \"\"\" List of Fortran 90 fixed format specific flags. \"\"\"\n if self.executables['compiler_fix']:\n return self.executables['compiler_fix'][1:]\n return []\n def get_flags_linker_so(self):\n \"\"\" List of linker flags to build a shared library. \"\"\"\n if self.executables['linker_so']:\n return self.executables['linker_so'][1:]\n return []\n def get_flags_ar(self):\n \"\"\" List of archiver flags. \"\"\"\n if self.executables['archiver']:\n return self.executables['archiver'][1:]\n return []\n def get_flags_opt(self):\n \"\"\" List of architecture independent compiler flags. \"\"\"\n return []\n def get_flags_arch(self):\n \"\"\" List of architecture dependent compiler flags. \"\"\"\n return []\n def get_flags_debug(self):\n \"\"\" List of compiler flags to compile with debugging information. 
\"\"\"\n return []\n get_flags_opt_f77 = get_flags_opt_f90 = get_flags_opt\n get_flags_arch_f77 = get_flags_arch_f90 = get_flags_arch\n get_flags_debug_f77 = get_flags_debug_f90 = get_flags_debug\n\n def get_libraries(self):\n \"\"\" List of compiler libraries. \"\"\"\n return self.libraries[:]\n def get_library_dirs(self):\n \"\"\" List of compiler library directories. \"\"\"\n return self.library_dirs[:]\n\n ############################################################\n\n ## Public methods:\n\n def customize(self, dist=None):\n \"\"\" Customize Fortran compiler.\n\n This method gets Fortran compiler specific information from\n (i) class definition, (ii) environment, (iii) distutils config\n files, and (iv) command line.\n\n This method should be always called after constructing a\n compiler instance. But not in __init__ because Distribution\n instance is needed for (iii) and (iv).\n \"\"\"\n log.info('customize %s' % (self.__class__.__name__))\n if dist is None:\n # These hooks are for testing only!\n from dist import Distribution\n dist = Distribution()\n dist.script_name = os.path.basename(sys.argv[0])\n dist.script_args = ['config_fc'] + sys.argv[1:]\n dist.cmdclass['config_fc'] = config_fc\n dist.parse_config_files()\n dist.parse_command_line()\n\n conf = dist.get_option_dict('config_fc')\n noopt = conf.get('noopt',[None,0])[1]\n if 0: # change to `if 1:` when making release.\n # Don't use architecture dependent compiler flags:\n noarch = 1\n else:\n noarch = conf.get('noarch',[None,noopt])[1]\n debug = conf.get('debug',[None,0])[1]\n\n f77 = self.__get_cmd('compiler_f77','F77',(conf,'f77exec'))\n f90 = self.__get_cmd('compiler_f90','F90',(conf,'f90exec'))\n # Temporarily setting f77,f90 compilers so that\n # version_cmd can use their executables.\n if f77:\n self.set_executables(compiler_f77=[f77])\n if f90:\n self.set_executables(compiler_f90=[f90])\n\n # Must set version_cmd before others as self.get_flags*\n # methods may call self.get_version.\n vers_cmd 
= self.__get_cmd(self.get_version_cmd)\n if vers_cmd:\n vflags = self.__get_flags(self.get_flags_version)\n self.set_executables(version_cmd=[vers_cmd]+vflags)\n\n if f77:\n f77flags = self.__get_flags(self.get_flags_f77,'F77FLAGS',\n (conf,'f77flags'))\n if f90:\n f90flags = self.__get_flags(self.get_flags_f90,'F90FLAGS',\n (conf,'f90flags'))\n freeflags = self.__get_flags(self.get_flags_free,'FREEFLAGS',\n (conf,'freeflags'))\n # XXX Assuming that free format is default for f90 compiler.\n fix = self.__get_cmd('compiler_fix','F90',(conf,'f90exec'))\n if fix:\n fixflags = self.__get_flags(self.get_flags_fix) + f90flags\n\n oflags,aflags,dflags = [],[],[]\n if not noopt:\n oflags = self.__get_flags(self.get_flags_opt,'FOPT',(conf,'opt'))\n if f77 and self.get_flags_opt is not self.get_flags_opt_f77:\n f77flags += self.__get_flags(self.get_flags_opt_f77)\n if f90 and self.get_flags_opt is not self.get_flags_opt_f90:\n f90flags += self.__get_flags(self.get_flags_opt_f90)\n if fix and self.get_flags_opt is not self.get_flags_opt_f90:\n fixflags += self.__get_flags(self.get_flags_opt_f90)\n if not noarch:\n aflags = self.__get_flags(self.get_flags_arch,'FARCH',\n (conf,'arch'))\n if f77 and self.get_flags_arch is not self.get_flags_arch_f77:\n f77flags += self.__get_flags(self.get_flags_arch_f77)\n if f90 and self.get_flags_arch is not self.get_flags_arch_f90:\n f90flags += self.__get_flags(self.get_flags_arch_f90)\n if fix and self.get_flags_arch is not self.get_flags_arch_f90:\n fixflags += self.__get_flags(self.get_flags_arch_f90)\n if debug:\n dflags = self.__get_flags(self.get_flags_debug,'FDEBUG')\n if f77 and self.get_flags_debug is not self.get_flags_debug_f77:\n f77flags += self.__get_flags(self.get_flags_debug_f77)\n if f90 and self.get_flags_debug is not self.get_flags_debug_f90:\n f90flags += self.__get_flags(self.get_flags_debug_f90)\n if fix and self.get_flags_debug is not self.get_flags_debug_f90:\n fixflags += 
self.__get_flags(self.get_flags_debug_f90)\n\n fflags = self.__get_flags(self.get_flags,'FFLAGS') \\\n + dflags + oflags + aflags\n\n if f77:\n self.set_executables(compiler_f77=[f77]+f77flags+fflags)\n if f90:\n self.set_executables(compiler_f90=[f90]+freeflags+f90flags+fflags)\n if fix:\n self.set_executables(compiler_fix=[fix]+fixflags+fflags)\n\n #XXX: Do we need LDSHARED->SOSHARED, LDFLAGS->SOFLAGS\n linker_so = self.__get_cmd(self.get_linker_so,'LDSHARED')\n if linker_so:\n linker_so_flags = self.__get_flags(self.get_flags_linker_so,'LDFLAGS')\n self.set_executables(linker_so=[linker_so]+linker_so_flags)\n\n ar = self.__get_cmd('archiver','AR')\n if ar:\n arflags = self.__get_flags(self.get_flags_ar,'ARFLAGS')\n self.set_executables(archiver=[ar]+arflags)\n\n ranlib = self.__get_cmd('ranlib','RANLIB')\n if ranlib:\n self.set_executables(ranlib=[ranlib])\n\n self.set_library_dirs(self.get_library_dirs())\n self.set_libraries(self.get_libraries())\n\n verbose = conf.get('verbose',[None,0])[1]\n if verbose:\n self.dump_properties()\n return\n\n def dump_properties(self):\n \"\"\" Print out the attributes of a compiler instance. 
\"\"\"\n props = []\n for key in self.executables.keys() + \\\n ['version','libraries','library_dirs',\n 'object_switch','compile_switch']:\n if hasattr(self,key):\n v = getattr(self,key)\n props.append((key, None, '= '+`v`))\n props.sort()\n\n pretty_printer = FancyGetopt(props)\n for l in pretty_printer.generate_help(\"%s instance properties:\" \\\n % (self.__class__.__name__)):\n if l[:4]==' --':\n l = ' ' + l[4:]\n print l\n return\n\n ###################\n\n def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):\n \"\"\"Compile 'src' to product 'obj'.\"\"\"\n if is_f_file(src) and not has_f90_header(src):\n flavor = ':f77'\n compiler = self.compiler_f77\n elif is_free_format(src):\n flavor = ':f90'\n compiler = self.compiler_f90\n if compiler is None:\n raise DistutilsExecError, 'f90 not supported by '\\\n +self.__class__.__name__\n else:\n flavor = ':fix'\n compiler = self.compiler_fix\n if compiler is None:\n raise DistutilsExecError, 'f90 (fixed) not supported by '\\\n +self.__class__.__name__\n if self.object_switch[-1]==' ':\n o_args = [self.object_switch.strip(),obj]\n else:\n o_args = [self.object_switch.strip()+obj]\n\n assert self.compile_switch.strip()\n s_args = [self.compile_switch, src]\n\n if os.name == 'nt':\n compiler = _nt_quote_args(compiler)\n command = compiler + cc_args + s_args + o_args + extra_postargs\n\n display = '%s: %s' % (os.path.basename(compiler[0]) + flavor,\n src)\n try:\n self.spawn(command,display=display)\n except DistutilsExecError, msg:\n raise CompileError, msg\n\n return\n\n def module_options(self, module_dirs, module_build_dir):\n options = []\n if self.module_dir_switch is not None:\n if self.module_dir_switch[-1]==' ':\n options.extend([self.module_dir_switch.strip(),module_build_dir])\n else:\n options.append(self.module_dir_switch.strip()+module_build_dir)\n else:\n print 'XXX: module_build_dir=%r option ignored' % (module_build_dir)\n print 'XXX: Fix module_dir_switch for ',self.__class__.__name__\n 
if self.module_include_switch is not None:\n for d in [module_build_dir]+module_dirs:\n options.append('%s%s' % (self.module_include_switch, d))\n else:\n print 'XXX: module_dirs=%r option ignored' % (module_dirs)\n print 'XXX: Fix module_include_switch for ',self.__class__.__name__\n return options\n\n def library_option(self, lib):\n return \"-l\" + lib\n def library_dir_option(self, dir):\n return \"-L\" + dir\n\n# def _get_cc_args(self, pp_opts, debug, extra_preargs):\n# return []\n\n if sys.version[:3]<'2.3':\n def _get_cc_args(self, pp_opts, debug, before):\n # works for unixccompiler, emxccompiler, cygwinccompiler\n cc_args = pp_opts + ['-c']\n if debug:\n cc_args[:0] = ['-g']\n if before:\n cc_args[:0] = before\n return cc_args\n\n def compile(self, sources, output_dir=None, macros=None,\n include_dirs=None, debug=0, extra_preargs=None,\n extra_postargs=None, depends=None):\n if output_dir is None: output_dir = self.output_dir\n if macros is None: macros = self.macros\n elif type(macros) is ListType: macros = macros + (self.macros or [])\n if include_dirs is None: include_dirs = self.include_dirs\n elif type(include_dirs) in (ListType, TupleType):\n include_dirs = list(include_dirs) + (self.include_dirs or [])\n if extra_preargs is None: extra_preargs=[]\n\n display = []\n for fc in ['f77','f90','fix']:\n fcomp = getattr(self,'compiler_'+fc)\n if fcomp is None:\n continue\n display.append(\"%s(%s) options: '%s'\" \\\n % (os.path.basename(fcomp[0]),\n fc,\n ' '.join(fcomp[1:])))\n display = '\\n'.join(display)\n log.info(display)\n \n from distutils.sysconfig import python_build\n objects = self.object_filenames(sources,strip_dir=python_build,\n output_dir=output_dir)\n from distutils.ccompiler import gen_preprocess_options\n pp_opts = gen_preprocess_options(macros, include_dirs)\n build = {}\n for i in range(len(sources)):\n src,obj = sources[i],objects[i]\n ext = os.path.splitext(src)[1]\n self.mkpath(os.path.dirname(obj))\n build[obj] = src, ext\n cc_args 
= self._get_cc_args(pp_opts, debug, extra_preargs)\n\n display = \"compile options: '%s'\" % (' '.join(cc_args))\n if extra_postargs:\n display += \"\\nextra options: '%s'\" % (' '.join(extra_postargs))\n log.info(display)\n\n objects_to_build = build.keys()\n for obj in objects:\n if obj in objects_to_build:\n src, ext = build[obj]\n if self.compiler_type=='absoft':\n obj = cyg2win32(obj)\n src = cyg2win32(src)\n self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)\n return objects\n def detect_language(self, sources):\n return\n\n def link(self, target_desc, objects,\n output_filename, output_dir=None, libraries=None,\n library_dirs=None, runtime_library_dirs=None,\n export_symbols=None, debug=0, extra_preargs=None,\n extra_postargs=None, build_temp=None, target_lang=None):\n objects, output_dir = self._fix_object_args(objects, output_dir)\n libraries, library_dirs, runtime_library_dirs = \\\n self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)\n\n lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,\n libraries)\n if type(output_dir) not in (StringType, NoneType):\n raise TypeError, \"'output_dir' must be a string or None\"\n if output_dir is not None:\n output_filename = os.path.join(output_dir, output_filename)\n\n if self._need_link(objects, output_filename):\n if self.library_switch[-1]==' ':\n o_args = [self.library_switch.strip(),output_filename]\n else:\n o_args = [self.library_switch.strip()+output_filename]\n \n if type(self.objects) is type(''):\n ld_args = objects + [self.objects]\n else:\n ld_args = objects + self.objects\n ld_args = ld_args + lib_opts + o_args\n if debug:\n ld_args[:0] = ['-g']\n if extra_preargs:\n ld_args[:0] = extra_preargs\n if extra_postargs:\n ld_args.extend(extra_postargs)\n self.mkpath(os.path.dirname(output_filename))\n if target_desc == CCompiler.EXECUTABLE:\n raise NotImplementedError,self.__class__.__name__+'.linker_exe attribute'\n else:\n linker = self.linker_so[:]\n if os.name 
== 'nt':\n linker = _nt_quote_args(linker)\n command = linker + ld_args\n try:\n self.spawn(command)\n except DistutilsExecError, msg:\n raise LinkError, msg\n else:\n log.debug(\"skipping %s (up-to-date)\", output_filename)\n return\n\n ############################################################\n\n ## Private methods:\n\n def __get_cmd(self, command, envvar=None, confvar=None):\n if command is None:\n var = None\n elif type(command) is type(''):\n var = self.executables[command]\n if var is not None:\n var = var[0]\n else:\n var = command()\n if envvar is not None:\n var = os.environ.get(envvar, var)\n if confvar is not None:\n var = confvar[0].get(confvar[1], [None,var])[1]\n return var\n\n def __get_flags(self, command, envvar=None, confvar=None):\n if command is None:\n var = []\n elif type(command) is type(''):\n var = self.executables[command][1:]\n else:\n var = command()\n if envvar is not None:\n var = os.environ.get(envvar, var)\n if confvar is not None:\n var = confvar[0].get(confvar[1], [None,var])[1]\n if type(var) is type(''):\n var = split_quoted(var)\n return var\n\n ## class FCompiler\n\n##############################################################################\n\nfcompiler_class = {'gnu':('gnufcompiler','GnuFCompiler',\n \"GNU Fortran Compiler\"),\n 'g95':('g95fcompiler','G95FCompiler',\n \"GNU Fortran 95 Compiler\"),\n 'pg':('pgfcompiler','PGroupFCompiler',\n \"Portland Group Fortran Compiler\"),\n 'absoft':('absoftfcompiler','AbsoftFCompiler',\n \"Absoft Corp Fortran Compiler\"),\n 'mips':('mipsfcompiler','MipsFCompiler',\n \"MIPSpro Fortran Compiler\"),\n 'sun':('sunfcompiler','SunFCompiler',\n \"Sun|Forte Fortran 95 Compiler\"),\n 'intel':('intelfcompiler','IntelFCompiler',\n \"Intel Fortran Compiler for 32-bit apps\"),\n 'intelv':('intelfcompiler','IntelVisualFCompiler',\n \"Intel Visual Fortran Compiler for 32-bit apps\"),\n 'intele':('intelfcompiler','IntelItaniumFCompiler',\n \"Intel Fortran Compiler for Itanium apps\"),\n 
'intelev':('intelfcompiler','IntelItaniumVisualFCompiler',\n \"Intel Visual Fortran Compiler for Itanium apps\"),\n 'nag':('nagfcompiler','NAGFCompiler',\n \"NAGWare Fortran 95 Compiler\"),\n 'compaq':('compaqfcompiler','CompaqFCompiler',\n \"Compaq Fortran Compiler\"),\n 'compaqv':('compaqfcompiler','CompaqVisualFCompiler',\n \"DIGITAL|Compaq Visual Fortran Compiler\"),\n 'vast':('vastfcompiler','VastFCompiler',\n \"Pacific-Sierra Research Fortran 90 Compiler\"),\n 'hpux':('hpuxfcompiler','HPUXFCompiler',\n \"HP Fortran 90 Compiler\"),\n 'lahey':('laheyfcompiler','LaheyFCompiler',\n \"Lahey/Fujitsu Fortran 95 Compiler\"),\n 'ibm':('ibmfcompiler','IbmFCompiler',\n \"IBM XL Fortran Compiler\"),\n 'f':('fcompiler','FFCompiler',\n \"Fortran Company/NAG F Compiler\"),\n }\n\n_default_compilers = (\n # Platform mappings\n ('win32',('gnu','intelv','absoft','compaqv','intelev','g95')),\n ('cygwin.*',('gnu','intelv','absoft','compaqv','intelev','g95')),\n ('linux.*',('gnu','intel','lahey','pg','absoft','nag','vast','compaq',\n 'intele','g95')),\n ('darwin.*',('nag','absoft','ibm','gnu','g95')),\n ('sunos.*',('forte','gnu','sun','g95')),\n ('irix.*',('mips','gnu')),\n ('aix.*',('ibm','gnu')),\n # OS mappings\n ('posix',('gnu',)),\n ('nt',('gnu',)),\n ('mac',('gnu',)),\n )\n\ndef _find_existing_fcompiler(compilers, osname=None, platform=None):\n for compiler in compilers:\n v = None\n try:\n c = new_fcompiler(plat=platform, compiler=compiler)\n c.customize()\n v = c.get_version()\n except DistutilsModuleError:\n pass\n except Exception, msg:\n log.warn(msg)\n if v is not None:\n return compiler\n return\n\ndef get_default_fcompiler(osname=None, platform=None):\n \"\"\" Determine the default Fortran compiler to use for the given platform. 
\"\"\"\n if osname is None:\n osname = os.name\n if platform is None:\n platform = sys.platform\n matching_compilers = []\n for pattern, compiler in _default_compilers:\n if re.match(pattern, platform) is not None or \\\n re.match(pattern, osname) is not None:\n if type(compiler) is type(()):\n matching_compilers.extend(list(compiler))\n else:\n matching_compilers.append(compiler)\n if not matching_compilers:\n matching_compilers.append('gnu')\n compiler = _find_existing_fcompiler(matching_compilers,\n osname=osname,\n platform=platform)\n if compiler is not None:\n return compiler\n return matching_compilers[0]\n\ndef new_fcompiler(plat=None,\n compiler=None,\n verbose=0,\n dry_run=0,\n force=0):\n \"\"\" Generate an instance of some FCompiler subclass for the supplied\n platform/compiler combination.\n \"\"\"\n if plat is None:\n plat = os.name\n try:\n if compiler is None:\n compiler = get_default_fcompiler(plat)\n (module_name, class_name, long_description) = fcompiler_class[compiler]\n except KeyError:\n msg = \"don't know how to compile Fortran code on platform '%s'\" % plat\n if compiler is not None:\n msg = msg + \" with '%s' compiler.\" % compiler\n msg = msg + \" Supported compilers are: %s)\" \\\n % (','.join(fcompiler_class.keys()))\n raise DistutilsPlatformError, msg\n\n try:\n module_name = 'scipy_distutils.'+module_name\n __import__ (module_name)\n module = sys.modules[module_name]\n klass = vars(module)[class_name]\n except ImportError:\n raise DistutilsModuleError, \\\n \"can't compile Fortran code: unable to load module '%s'\" % \\\n module_name\n except KeyError:\n raise DistutilsModuleError, \\\n (\"can't compile Fortran code: unable to find class '%s' \" +\n \"in module '%s'\") % (class_name, module_name)\n compiler = klass(None, dry_run, force)\n log.debug('new_fcompiler returns %s' % (klass))\n return compiler\n\ndef show_fcompilers(dist = None):\n \"\"\" Print list of available compilers (used by the \"--help-fcompiler\"\n option to 
\"config_fc\").\n \"\"\"\n if dist is None:\n from dist import Distribution\n dist = Distribution()\n dist.script_name = os.path.basename(sys.argv[0])\n dist.script_args = ['config_fc'] + sys.argv[1:]\n dist.cmdclass['config_fc'] = config_fc\n dist.parse_config_files()\n dist.parse_command_line()\n\n compilers = []\n compilers_na = []\n compilers_ni = []\n for compiler in fcompiler_class.keys():\n v = 'N/A'\n try:\n c = new_fcompiler(compiler=compiler)\n c.customize(dist)\n v = c.get_version()\n except DistutilsModuleError:\n pass\n except Exception, msg:\n log.warn(msg)\n if v is None:\n compilers_na.append((\"fcompiler=\"+compiler, None,\n fcompiler_class[compiler][2]))\n elif v=='N/A':\n compilers_ni.append((\"fcompiler=\"+compiler, None,\n fcompiler_class[compiler][2]))\n else:\n compilers.append((\"fcompiler=\"+compiler, None,\n fcompiler_class[compiler][2] + ' (%s)' % v))\n compilers.sort()\n compilers_na.sort()\n pretty_printer = FancyGetopt(compilers)\n pretty_printer.print_help(\"List of available Fortran compilers:\")\n pretty_printer = FancyGetopt(compilers_na)\n pretty_printer.print_help(\"List of unavailable Fortran compilers:\")\n if compilers_ni:\n pretty_printer = FancyGetopt(compilers_ni)\n pretty_printer.print_help(\"List of unimplemented Fortran compilers:\")\n print \"For compiler details, run 'config_fc --verbose' setup command.\"\n\ndef dummy_fortran_file():\n import tempfile\n dummy_name = tempfile.mktemp()+'__dummy'\n dummy = open(dummy_name+'.f','w')\n dummy.write(\" subroutine dummy()\\n end\\n\")\n dummy.close()\n def rm_file(name=dummy_name,log_threshold=log._global_log.threshold):\n save_th = log._global_log.threshold\n log.set_threshold(log_threshold)\n try: os.remove(name+'.f'); log.debug('removed '+name+'.f')\n except OSError: pass\n try: os.remove(name+'.o'); log.debug('removed '+name+'.o')\n except OSError: pass\n log.set_threshold(save_th)\n atexit.register(rm_file)\n return dummy_name\n\nis_f_file = 
re.compile(r'.*[.](for|ftn|f77|f)\\Z',re.I).match\n_has_f_header = re.compile(r'-[*]-\\s*fortran\\s*-[*]-',re.I).search\n_has_f90_header = re.compile(r'-[*]-\\s*f90\\s*-[*]-',re.I).search\n_has_fix_header = re.compile(r'-[*]-\\s*fix\\s*-[*]-',re.I).search\n_free_f90_start = re.compile(r'[^c*]\\s*[^\\s\\d\\t]',re.I).match\ndef is_free_format(file):\n \"\"\"Check if file is in free format Fortran.\"\"\"\n # f90 allows both fixed and free format, assuming fixed unless\n # signs of free format are detected.\n result = 0\n f = open(file,'r')\n line = f.readline()\n n = 15 # the number of non-comment lines to scan for hints\n if _has_f_header(line):\n n = 0\n elif _has_f90_header(line):\n n = 0\n result = 1\n while n>0 and line:\n if line[0]!='!':\n n -= 1\n if (line[0]!='\\t' and _free_f90_start(line[:5])) or line[-2:-1]=='&':\n result = 1\n break\n line = f.readline()\n f.close()\n return result\n\ndef has_f90_header(src):\n f = open(src,'r')\n line = f.readline()\n f.close()\n return _has_f90_header(line) or _has_fix_header(line)\nif __name__ == '__main__':\n show_fcompilers()\n", "source_code_before": "\"\"\"scipy_distutils.fcompiler\n\nContains FCompiler, an abstract base class that defines the interface\nfor the Scipy_distutils Fortran compiler abstraction model.\n\n\"\"\"\n\nimport re\nimport os\nimport sys\nimport atexit\nfrom types import StringType, NoneType, ListType, TupleType\nfrom glob import glob\n\nfrom distutils.version import StrictVersion\nfrom scipy_distutils.ccompiler import CCompiler, gen_lib_options\n# distutils.ccompiler provides the following functions:\n# gen_preprocess_options(macros, include_dirs)\n# gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries)\nfrom distutils.errors import DistutilsModuleError,DistutilsArgError,\\\n DistutilsExecError,CompileError,LinkError,DistutilsPlatformError\nfrom distutils.core import Command\nfrom distutils.util import split_quoted\nfrom distutils.fancy_getopt import FancyGetopt\nfrom 
distutils.sysconfig import get_config_var\nfrom distutils.spawn import _nt_quote_args \n\n\nfrom scipy_distutils.command.config_compiler import config_fc\n\nimport log\nfrom misc_util import compiler_to_string, cyg2win32\nfrom exec_command import find_executable, exec_command\n\nclass FCompiler(CCompiler):\n \"\"\" Abstract base class to define the interface that must be implemented\n by real Fortran compiler classes.\n\n Methods that subclasses may redefine:\n\n get_version_cmd(), get_linker_so(), get_version()\n get_flags(), get_flags_opt(), get_flags_arch(), get_flags_debug()\n get_flags_f77(), get_flags_opt_f77(), get_flags_arch_f77(),\n get_flags_debug_f77(), get_flags_f90(), get_flags_opt_f90(),\n get_flags_arch_f90(), get_flags_debug_f90(),\n get_flags_fix(), get_flags_linker_so(), get_flags_version()\n\n DON'T call these methods (except get_version) after\n constructing a compiler instance or inside any other method.\n All methods, except get_version_cmd() and get_flags_version(), may\n call get_version() method.\n\n After constructing a compiler instance, always call customize(dist=None)\n method that finalizes compiler construction and makes the following\n attributes available:\n compiler_f77\n compiler_f90\n compiler_fix\n linker_so\n archiver\n ranlib\n libraries\n library_dirs\n \"\"\"\n # CCompiler defines the following attributes:\n # compiler_type\n # src_extensions\n # obj_extension\n # static_lib_extension\n # shared_lib_extension\n # static_lib_format\n # shared_lib_format\n # exe_extension\n # language_map ### REDEFINED\n # language_order ### REDEFINED\n # and the following public methods:\n # set_executables(**args)\n # set_executable(key,value)\n # define_macro(name, value=None)\n # undefine_macro(name)\n # add_include_dir(dir)\n # set_include_dirs(dirs)\n # add_library(libname)\n # set_libraries(libnames)\n # add_library_dir(dir)\n # set_library_dirs(dirs)\n # add_runtime_library_dir(dir)\n # set_runtime_library_dirs(dirs)\n # 
add_link_object(object)\n # set_link_objects(objects)\n #\n # detect_language(sources) ### USABLE\n #\n # preprocess(source,output_file=None,macros=None,include_dirs=None,\n # extra_preargs=None,extra_postargs=None)\n # compile(sources, output_dir=None, macros=None,\n # include_dirs=None, debug=0, extra_preargs=None,\n # extra_postargs=None, depends=None)\n # create_static_lib(objects,output_libname,output_dir=None,debug=0,target_lang=None):\n # link(target_desc, objects, output_filename, output_dir=None,\n # libraries=None, library_dirs=None, runtime_library_dirs=None,\n # export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None,\n # build_temp=None, target_lang=None)\n # link_shared_lib(objects, output_libname, output_dir=None,\n # libraries=None, library_dirs=None, runtime_library_dirs=None,\n # export_symbols=None, debug=0, extra_preargs=None,\n # extra_postargs=None, build_temp=None, target_lang=None)\n # link_shared_object(objects,output_filename,output_dir=None,\n # libraries=None,library_dirs=None,runtime_library_dirs=None,\n # export_symbols=None,debug=0,extra_preargs=None,\n # extra_postargs=None,build_temp=None,target_lang=None)\n # link_executable(objects,output_progname,output_dir=None,\n # libraries=None,library_dirs=None,runtime_library_dirs=None,\n # debug=0,extra_preargs=None,extra_postargs=None,target_lang=None)\n #\n # library_dir_option(dir)\n # runtime_library_dir_option(dir)\n # library_option(lib)\n # has_function(funcname,includes=None,include_dirs=None,\n # libraries=None,library_dirs=None)\n # find_library_file(dirs, lib, debug=0)\n #\n # object_filenames(source_filenames, strip_dir=0, output_dir='')\n # shared_object_filename(basename, strip_dir=0, output_dir='')\n # executable_filenamee(basename, strip_dir=0, output_dir='')\n # library_filename(libname, lib_type='static',strip_dir=0, output_dir=''):\n #\n # announce(msg, level=1)\n # debug_print(msg)\n # warn(msg)\n # execute(func, args, msg=None, level=1)\n # spawn(cmd)\n # 
move_file(src,dst)\n # mkpath(name, mode=0777)\n #\n\n language_map = {'.f':'f77',\n '.for':'f77',\n '.F':'f77', # XXX: needs preprocessor\n '.ftn':'f77',\n '.f77':'f77',\n '.f90':'f90',\n '.F90':'f90', # XXX: needs preprocessor\n '.f95':'f90',\n }\n language_order = ['f90','f77']\n\n version_pattern = None\n\n executables = {\n 'version_cmd' : [\"f77\",\"-v\"],\n 'compiler_f77' : [\"f77\"],\n 'compiler_f90' : [\"f90\"],\n 'compiler_fix' : [\"f90\",\"-fixed\"],\n 'linker_so' : [\"f90\",\"-shared\"],\n #'linker_exe' : [\"f90\"], # XXX do we need it??\n 'archiver' : [\"ar\",\"-cr\"],\n 'ranlib' : None,\n }\n\n compile_switch = \"-c\"\n object_switch = \"-o \" # Ending space matters! It will be stripped\n # but if it is missing then object_switch\n # will be prefixed to object file name by\n # string concatenation.\n library_switch = \"-o \" # Ditto!\n\n # Switch to specify where module files are created and searched\n # for USE statement. Normally it is a string and also here ending\n # space matters. See above.\n module_dir_switch = None\n\n # Switch to specify where module files are searched for USE statement.\n module_include_switch = '-I' \n\n pic_flags = [] # Flags to create position-independent code\n\n src_extensions = ['.for','.ftn','.f77','.f','.f90','.f95','.F','.F90']\n obj_extension = \".o\"\n shared_lib_extension = get_config_var('SO') # or .dll\n static_lib_extension = \".a\" # or .lib\n static_lib_format = \"lib%s%s\" # or %s%s\n shared_lib_format = \"%s%s\"\n exe_extension = \"\"\n\n ######################################################################\n ## Methods that subclasses may redefine. But don't call these methods!\n ## They are private to FCompiler class and may return unexpected\n ## results if used elsewhere. So, you have been warned..\n\n def get_version_cmd(self):\n \"\"\" Compiler command to print out version information. 
\"\"\"\n f77 = self.executables['compiler_f77']\n if f77 is not None:\n f77 = f77[0]\n cmd = self.executables['version_cmd']\n if cmd is not None:\n cmd = cmd[0]\n if cmd==f77:\n cmd = self.compiler_f77[0]\n else:\n f90 = self.executables['compiler_f90']\n if f90 is not None:\n f90 = f90[0]\n if cmd==f90:\n cmd = self.compiler_f90[0]\n return cmd\n\n def get_linker_so(self):\n \"\"\" Linker command to build shared libraries. \"\"\"\n f77 = self.executables['compiler_f77']\n if f77 is not None:\n f77 = f77[0]\n ln = self.executables['linker_so']\n if ln is not None:\n ln = ln[0]\n if ln==f77:\n ln = self.compiler_f77[0]\n else:\n f90 = self.executables['compiler_f90']\n if f90 is not None:\n f90 = f90[0]\n if ln==f90:\n ln = self.compiler_f90[0]\n return ln\n\n def get_flags(self):\n \"\"\" List of flags common to all compiler types. \"\"\"\n return [] + self.pic_flags\n def get_flags_version(self):\n \"\"\" List of compiler flags to print out version information. \"\"\"\n if self.executables['version_cmd']:\n return self.executables['version_cmd'][1:]\n return []\n def get_flags_f77(self):\n \"\"\" List of Fortran 77 specific flags. \"\"\"\n if self.executables['compiler_f77']:\n return self.executables['compiler_f77'][1:]\n return []\n def get_flags_f90(self):\n \"\"\" List of Fortran 90 specific flags. \"\"\"\n if self.executables['compiler_f90']:\n return self.executables['compiler_f90'][1:]\n return []\n def get_flags_free(self):\n \"\"\" List of Fortran 90 free format specific flags. \"\"\"\n return []\n def get_flags_fix(self):\n \"\"\" List of Fortran 90 fixed format specific flags. \"\"\"\n if self.executables['compiler_fix']:\n return self.executables['compiler_fix'][1:]\n return []\n def get_flags_linker_so(self):\n \"\"\" List of linker flags to build a shared library. \"\"\"\n if self.executables['linker_so']:\n return self.executables['linker_so'][1:]\n return []\n def get_flags_ar(self):\n \"\"\" List of archiver flags. 
\"\"\"\n if self.executables['archiver']:\n return self.executables['archiver'][1:]\n return []\n def get_flags_opt(self):\n \"\"\" List of architecture independent compiler flags. \"\"\"\n return []\n def get_flags_arch(self):\n \"\"\" List of architecture dependent compiler flags. \"\"\"\n return []\n def get_flags_debug(self):\n \"\"\" List of compiler flags to compile with debugging information. \"\"\"\n return []\n get_flags_opt_f77 = get_flags_opt_f90 = get_flags_opt\n get_flags_arch_f77 = get_flags_arch_f90 = get_flags_arch\n get_flags_debug_f77 = get_flags_debug_f90 = get_flags_debug\n\n def get_libraries(self):\n \"\"\" List of compiler libraries. \"\"\"\n return self.libraries[:]\n def get_library_dirs(self):\n \"\"\" List of compiler library directories. \"\"\"\n return self.library_dirs[:]\n\n ############################################################\n\n ## Public methods:\n\n def customize(self, dist=None):\n \"\"\" Customize Fortran compiler.\n\n This method gets Fortran compiler specific information from\n (i) class definition, (ii) environment, (iii) distutils config\n files, and (iv) command line.\n\n This method should be always called after constructing a\n compiler instance. 
But not in __init__ because Distribution\n instance is needed for (iii) and (iv).\n \"\"\"\n log.info('customize %s' % (self.__class__.__name__))\n if dist is None:\n # These hooks are for testing only!\n from dist import Distribution\n dist = Distribution()\n dist.script_name = os.path.basename(sys.argv[0])\n dist.script_args = ['config_fc'] + sys.argv[1:]\n dist.cmdclass['config_fc'] = config_fc\n dist.parse_config_files()\n dist.parse_command_line()\n\n conf = dist.get_option_dict('config_fc')\n noopt = conf.get('noopt',[None,0])[1]\n if 0: # change to `if 1:` when making release.\n # Don't use architecture dependent compiler flags:\n noarch = 1\n else:\n noarch = conf.get('noarch',[None,noopt])[1]\n debug = conf.get('debug',[None,0])[1]\n\n f77 = self.__get_cmd('compiler_f77','F77',(conf,'f77exec'))\n f90 = self.__get_cmd('compiler_f90','F90',(conf,'f90exec'))\n # Temporarily setting f77,f90 compilers so that\n # version_cmd can use their executables.\n if f77:\n self.set_executables(compiler_f77=[f77])\n if f90:\n self.set_executables(compiler_f90=[f90])\n\n # Must set version_cmd before others as self.get_flags*\n # methods may call self.get_version.\n vers_cmd = self.__get_cmd(self.get_version_cmd)\n if vers_cmd:\n vflags = self.__get_flags(self.get_flags_version)\n self.set_executables(version_cmd=[vers_cmd]+vflags)\n\n if f77:\n f77flags = self.__get_flags(self.get_flags_f77,'F77FLAGS',\n (conf,'f77flags'))\n if f90:\n f90flags = self.__get_flags(self.get_flags_f90,'F90FLAGS',\n (conf,'f90flags'))\n freeflags = self.__get_flags(self.get_flags_free,'FREEFLAGS',\n (conf,'freeflags'))\n # XXX Assuming that free format is default for f90 compiler.\n fix = self.__get_cmd('compiler_fix','F90',(conf,'f90exec'))\n if fix:\n fixflags = self.__get_flags(self.get_flags_fix) + f90flags\n\n oflags,aflags,dflags = [],[],[]\n if not noopt:\n oflags = self.__get_flags(self.get_flags_opt,'FOPT',(conf,'opt'))\n if f77 and self.get_flags_opt is not self.get_flags_opt_f77:\n 
f77flags += self.__get_flags(self.get_flags_opt_f77)\n if f90 and self.get_flags_opt is not self.get_flags_opt_f90:\n f90flags += self.__get_flags(self.get_flags_opt_f90)\n if fix and self.get_flags_opt is not self.get_flags_opt_f90:\n fixflags += self.__get_flags(self.get_flags_opt_f90)\n if not noarch:\n aflags = self.__get_flags(self.get_flags_arch,'FARCH',\n (conf,'arch'))\n if f77 and self.get_flags_arch is not self.get_flags_arch_f77:\n f77flags += self.__get_flags(self.get_flags_arch_f77)\n if f90 and self.get_flags_arch is not self.get_flags_arch_f90:\n f90flags += self.__get_flags(self.get_flags_arch_f90)\n if fix and self.get_flags_arch is not self.get_flags_arch_f90:\n fixflags += self.__get_flags(self.get_flags_arch_f90)\n if debug:\n dflags = self.__get_flags(self.get_flags_debug,'FDEBUG')\n if f77 and self.get_flags_debug is not self.get_flags_debug_f77:\n f77flags += self.__get_flags(self.get_flags_debug_f77)\n if f90 and self.get_flags_debug is not self.get_flags_debug_f90:\n f90flags += self.__get_flags(self.get_flags_debug_f90)\n if fix and self.get_flags_debug is not self.get_flags_debug_f90:\n fixflags += self.__get_flags(self.get_flags_debug_f90)\n\n fflags = self.__get_flags(self.get_flags,'FFLAGS') \\\n + dflags + oflags + aflags\n\n if f77:\n self.set_executables(compiler_f77=[f77]+f77flags+fflags)\n if f90:\n self.set_executables(compiler_f90=[f90]+freeflags+f90flags+fflags)\n if fix:\n self.set_executables(compiler_fix=[fix]+fixflags+fflags)\n\n #XXX: Do we need LDSHARED->SOSHARED, LDFLAGS->SOFLAGS\n linker_so = self.__get_cmd(self.get_linker_so,'LDSHARED')\n if linker_so:\n linker_so_flags = self.__get_flags(self.get_flags_linker_so,'LDFLAGS')\n self.set_executables(linker_so=[linker_so]+linker_so_flags)\n\n ar = self.__get_cmd('archiver','AR')\n if ar:\n arflags = self.__get_flags(self.get_flags_ar,'ARFLAGS')\n self.set_executables(archiver=[ar]+arflags)\n\n ranlib = self.__get_cmd('ranlib','RANLIB')\n if ranlib:\n 
self.set_executables(ranlib=[ranlib])\n\n self.set_library_dirs(self.get_library_dirs())\n self.set_libraries(self.get_libraries())\n\n verbose = conf.get('verbose',[None,0])[1]\n if verbose:\n self.dump_properties()\n return\n\n def dump_properties(self):\n \"\"\" Print out the attributes of a compiler instance. \"\"\"\n props = []\n for key in self.executables.keys() + \\\n ['version','libraries','library_dirs',\n 'object_switch','compile_switch']:\n if hasattr(self,key):\n v = getattr(self,key)\n props.append((key, None, '= '+`v`))\n props.sort()\n\n pretty_printer = FancyGetopt(props)\n for l in pretty_printer.generate_help(\"%s instance properties:\" \\\n % (self.__class__.__name__)):\n if l[:4]==' --':\n l = ' ' + l[4:]\n print l\n return\n\n ###################\n\n def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):\n \"\"\"Compile 'src' to product 'obj'.\"\"\"\n if is_f_file(src) and not has_f90_header(src):\n flavor = ':f77'\n compiler = self.compiler_f77\n elif is_free_format(src):\n flavor = ':f90'\n compiler = self.compiler_f90\n if compiler is None:\n raise DistutilsExecError, 'f90 not supported by '\\\n +self.__class__.__name__\n else:\n flavor = ':fix'\n compiler = self.compiler_fix\n if compiler is None:\n raise DistutilsExecError, 'f90 (fixed) not supported by '\\\n +self.__class__.__name__\n if self.object_switch[-1]==' ':\n o_args = [self.object_switch.strip(),obj]\n else:\n o_args = [self.object_switch.strip()+obj]\n\n assert self.compile_switch.strip()\n s_args = [self.compile_switch, src]\n\n if os.name == 'nt':\n compiler = _nt_quote_args(compiler)\n command = compiler + cc_args + s_args + o_args + extra_postargs\n\n display = '%s: %s' % (os.path.basename(compiler[0]) + flavor,\n src)\n try:\n self.spawn(command,display=display)\n except DistutilsExecError, msg:\n raise CompileError, msg\n\n return\n\n def module_options(self, module_dirs, module_build_dir):\n options = []\n if self.module_dir_switch is not None:\n if 
self.module_dir_switch[-1]==' ':\n options.extend([self.module_dir_switch.strip(),module_build_dir])\n else:\n options.append(self.module_dir_switch.strip()+module_build_dir)\n else:\n print 'XXX: module_build_dir=%r option ignored' % (module_build_dir)\n print 'XXX: Fix module_dir_switch for ',self.__class__.__name__\n if self.module_include_switch is not None:\n for d in [module_build_dir]+module_dirs:\n options.append('%s%s' % (self.module_include_switch, d))\n else:\n print 'XXX: module_dirs=%r option ignored' % (module_dirs)\n print 'XXX: Fix module_include_switch for ',self.__class__.__name__\n return options\n\n def library_option(self, lib):\n return \"-l\" + lib\n def library_dir_option(self, dir):\n return \"-L\" + dir\n\n# def _get_cc_args(self, pp_opts, debug, extra_preargs):\n# return []\n\n if sys.version[:3]<'2.3':\n def _get_cc_args(self, pp_opts, debug, before):\n # works for unixccompiler, emxccompiler, cygwinccompiler\n cc_args = pp_opts + ['-c']\n if debug:\n cc_args[:0] = ['-g']\n if before:\n cc_args[:0] = before\n return cc_args\n\n def compile(self, sources, output_dir=None, macros=None,\n include_dirs=None, debug=0, extra_preargs=None,\n extra_postargs=None, depends=None):\n if output_dir is None: output_dir = self.output_dir\n if macros is None: macros = self.macros\n elif type(macros) is ListType: macros = macros + (self.macros or [])\n if include_dirs is None: include_dirs = self.include_dirs\n elif type(include_dirs) in (ListType, TupleType):\n include_dirs = list(include_dirs) + (self.include_dirs or [])\n if extra_preargs is None: extra_preargs=[]\n\n display = []\n for fc in ['f77','f90','fix']:\n fcomp = getattr(self,'compiler_'+fc)\n if fcomp is None:\n continue\n display.append(\"%s(%s) options: '%s'\" \\\n % (os.path.basename(fcomp[0]),\n fc,\n ' '.join(fcomp[1:])))\n display = '\\n'.join(display)\n log.info(display)\n \n from distutils.sysconfig import python_build\n objects = 
self.object_filenames(sources,strip_dir=python_build,\n output_dir=output_dir)\n from distutils.ccompiler import gen_preprocess_options\n pp_opts = gen_preprocess_options(macros, include_dirs)\n build = {}\n for i in range(len(sources)):\n src,obj = sources[i],objects[i]\n ext = os.path.splitext(src)[1]\n self.mkpath(os.path.dirname(obj))\n build[obj] = src, ext\n cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)\n\n display = \"compile options: '%s'\" % (' '.join(cc_args))\n if extra_postargs:\n display += \"\\nextra options: '%s'\" % (' '.join(extra_postargs))\n log.info(display)\n\n objects_to_build = build.keys()\n for obj in objects:\n if obj in objects_to_build:\n src, ext = build[obj]\n if self.compiler_type=='absoft':\n obj = cyg2win32(obj)\n src = cyg2win32(src)\n self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)\n return objects\n def detect_language(self, sources):\n return\n\n def link(self, target_desc, objects,\n output_filename, output_dir=None, libraries=None,\n library_dirs=None, runtime_library_dirs=None,\n export_symbols=None, debug=0, extra_preargs=None,\n extra_postargs=None, build_temp=None, target_lang=None):\n objects, output_dir = self._fix_object_args(objects, output_dir)\n libraries, library_dirs, runtime_library_dirs = \\\n self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)\n\n lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,\n libraries)\n if type(output_dir) not in (StringType, NoneType):\n raise TypeError, \"'output_dir' must be a string or None\"\n if output_dir is not None:\n output_filename = os.path.join(output_dir, output_filename)\n\n if self._need_link(objects, output_filename):\n if self.library_switch[-1]==' ':\n o_args = [self.library_switch.strip(),output_filename]\n else:\n o_args = [self.library_switch.strip()+output_filename]\n \n if type(self.objects) is type(''):\n ld_args = objects + [self.objects]\n else:\n ld_args = objects + self.objects\n ld_args = ld_args + 
lib_opts + o_args\n if debug:\n ld_args[:0] = ['-g']\n if extra_preargs:\n ld_args[:0] = extra_preargs\n if extra_postargs:\n ld_args.extend(extra_postargs)\n self.mkpath(os.path.dirname(output_filename))\n if target_desc == CCompiler.EXECUTABLE:\n raise NotImplementedError,self.__class__.__name__+'.linker_exe attribute'\n else:\n linker = self.linker_so[:]\n if os.name == 'nt':\n linker = _nt_quote_args(linker)\n command = linker + ld_args\n try:\n self.spawn(command)\n except DistutilsExecError, msg:\n raise LinkError, msg\n else:\n log.debug(\"skipping %s (up-to-date)\", output_filename)\n return\n\n ############################################################\n\n ## Private methods:\n\n def __get_cmd(self, command, envvar=None, confvar=None):\n if command is None:\n var = None\n elif type(command) is type(''):\n var = self.executables[command]\n if var is not None:\n var = var[0]\n else:\n var = command()\n if envvar is not None:\n var = os.environ.get(envvar, var)\n if confvar is not None:\n var = confvar[0].get(confvar[1], [None,var])[1]\n return var\n\n def __get_flags(self, command, envvar=None, confvar=None):\n if command is None:\n var = []\n elif type(command) is type(''):\n var = self.executables[command][1:]\n else:\n var = command()\n if envvar is not None:\n var = os.environ.get(envvar, var)\n if confvar is not None:\n var = confvar[0].get(confvar[1], [None,var])[1]\n if type(var) is type(''):\n var = split_quoted(var)\n return var\n\n ## class FCompiler\n\n##############################################################################\n\nfcompiler_class = {'gnu':('gnufcompiler','GnuFCompiler',\n \"GNU Fortran Compiler\"),\n 'pg':('pgfcompiler','PGroupFCompiler',\n \"Portland Group Fortran Compiler\"),\n 'absoft':('absoftfcompiler','AbsoftFCompiler',\n \"Absoft Corp Fortran Compiler\"),\n 'mips':('mipsfcompiler','MipsFCompiler',\n \"MIPSpro Fortran Compiler\"),\n 'sun':('sunfcompiler','SunFCompiler',\n \"Sun|Forte Fortran 95 Compiler\"),\n 
'intel':('intelfcompiler','IntelFCompiler',\n \"Intel Fortran Compiler for 32-bit apps\"),\n 'intelv':('intelfcompiler','IntelVisualFCompiler',\n \"Intel Visual Fortran Compiler for 32-bit apps\"),\n 'intele':('intelfcompiler','IntelItaniumFCompiler',\n \"Intel Fortran Compiler for Itanium apps\"),\n 'intelev':('intelfcompiler','IntelItaniumVisualFCompiler',\n \"Intel Visual Fortran Compiler for Itanium apps\"),\n 'nag':('nagfcompiler','NAGFCompiler',\n \"NAGWare Fortran 95 Compiler\"),\n 'compaq':('compaqfcompiler','CompaqFCompiler',\n \"Compaq Fortran Compiler\"),\n 'compaqv':('compaqfcompiler','CompaqVisualFCompiler',\n \"DIGITAL|Compaq Visual Fortran Compiler\"),\n 'vast':('vastfcompiler','VastFCompiler',\n \"Pacific-Sierra Research Fortran 90 Compiler\"),\n 'hpux':('hpuxfcompiler','HPUXFCompiler',\n \"HP Fortran 90 Compiler\"),\n 'lahey':('laheyfcompiler','LaheyFCompiler',\n \"Lahey/Fujitsu Fortran 95 Compiler\"),\n 'ibm':('ibmfcompiler','IbmFCompiler',\n \"IBM XL Fortran Compiler\"),\n 'f':('fcompiler','FFCompiler',\n \"Fortran Company/NAG F Compiler\"),\n }\n\n_default_compilers = (\n # Platform mappings\n ('win32',('gnu','intelv','absoft','compaqv','intelev')),\n ('cygwin.*',('gnu','intelv','absoft','compaqv','intelev')),\n ('linux.*',('gnu','intel','lahey','pg','absoft','nag','vast','compaq',\n 'intele')),\n ('darwin.*',('nag','absoft','ibm','gnu')),\n ('sunos.*',('forte','gnu','sun')),\n ('irix.*',('mips','gnu')),\n ('aix.*',('ibm','gnu')),\n # OS mappings\n ('posix',('gnu',)),\n ('nt',('gnu',)),\n ('mac',('gnu',)),\n )\n\ndef _find_existing_fcompiler(compilers, osname=None, platform=None):\n for compiler in compilers:\n v = None\n try:\n c = new_fcompiler(plat=platform, compiler=compiler)\n c.customize()\n v = c.get_version()\n except DistutilsModuleError:\n pass\n except Exception, msg:\n log.warn(msg)\n if v is not None:\n return compiler\n return\n\ndef get_default_fcompiler(osname=None, platform=None):\n \"\"\" Determine the default Fortran compiler 
to use for the given platform. \"\"\"\n if osname is None:\n osname = os.name\n if platform is None:\n platform = sys.platform\n matching_compilers = []\n for pattern, compiler in _default_compilers:\n if re.match(pattern, platform) is not None or \\\n re.match(pattern, osname) is not None:\n if type(compiler) is type(()):\n matching_compilers.extend(list(compiler))\n else:\n matching_compilers.append(compiler)\n if not matching_compilers:\n matching_compilers.append('gnu')\n compiler = _find_existing_fcompiler(matching_compilers,\n osname=osname,\n platform=platform)\n if compiler is not None:\n return compiler\n return matching_compilers[0]\n\ndef new_fcompiler(plat=None,\n compiler=None,\n verbose=0,\n dry_run=0,\n force=0):\n \"\"\" Generate an instance of some FCompiler subclass for the supplied\n platform/compiler combination.\n \"\"\"\n if plat is None:\n plat = os.name\n try:\n if compiler is None:\n compiler = get_default_fcompiler(plat)\n (module_name, class_name, long_description) = fcompiler_class[compiler]\n except KeyError:\n msg = \"don't know how to compile Fortran code on platform '%s'\" % plat\n if compiler is not None:\n msg = msg + \" with '%s' compiler.\" % compiler\n msg = msg + \" Supported compilers are: %s)\" \\\n % (','.join(fcompiler_class.keys()))\n raise DistutilsPlatformError, msg\n\n try:\n module_name = 'scipy_distutils.'+module_name\n __import__ (module_name)\n module = sys.modules[module_name]\n klass = vars(module)[class_name]\n except ImportError:\n raise DistutilsModuleError, \\\n \"can't compile Fortran code: unable to load module '%s'\" % \\\n module_name\n except KeyError:\n raise DistutilsModuleError, \\\n (\"can't compile Fortran code: unable to find class '%s' \" +\n \"in module '%s'\") % (class_name, module_name)\n compiler = klass(None, dry_run, force)\n log.debug('new_fcompiler returns %s' % (klass))\n return compiler\n\ndef show_fcompilers(dist = None):\n \"\"\" Print list of available compilers (used by the 
\"--help-fcompiler\"\n option to \"config_fc\").\n \"\"\"\n if dist is None:\n from dist import Distribution\n dist = Distribution()\n dist.script_name = os.path.basename(sys.argv[0])\n dist.script_args = ['config_fc'] + sys.argv[1:]\n dist.cmdclass['config_fc'] = config_fc\n dist.parse_config_files()\n dist.parse_command_line()\n\n compilers = []\n compilers_na = []\n compilers_ni = []\n for compiler in fcompiler_class.keys():\n v = 'N/A'\n try:\n c = new_fcompiler(compiler=compiler)\n c.customize(dist)\n v = c.get_version()\n except DistutilsModuleError:\n pass\n except Exception, msg:\n log.warn(msg)\n if v is None:\n compilers_na.append((\"fcompiler=\"+compiler, None,\n fcompiler_class[compiler][2]))\n elif v=='N/A':\n compilers_ni.append((\"fcompiler=\"+compiler, None,\n fcompiler_class[compiler][2]))\n else:\n compilers.append((\"fcompiler=\"+compiler, None,\n fcompiler_class[compiler][2] + ' (%s)' % v))\n compilers.sort()\n compilers_na.sort()\n pretty_printer = FancyGetopt(compilers)\n pretty_printer.print_help(\"List of available Fortran compilers:\")\n pretty_printer = FancyGetopt(compilers_na)\n pretty_printer.print_help(\"List of unavailable Fortran compilers:\")\n if compilers_ni:\n pretty_printer = FancyGetopt(compilers_ni)\n pretty_printer.print_help(\"List of unimplemented Fortran compilers:\")\n print \"For compiler details, run 'config_fc --verbose' setup command.\"\n\ndef dummy_fortran_file():\n import tempfile\n dummy_name = tempfile.mktemp()+'__dummy'\n dummy = open(dummy_name+'.f','w')\n dummy.write(\" subroutine dummy()\\n end\\n\")\n dummy.close()\n def rm_file(name=dummy_name,log_threshold=log._global_log.threshold):\n save_th = log._global_log.threshold\n log.set_threshold(log_threshold)\n try: os.remove(name+'.f'); log.debug('removed '+name+'.f')\n except OSError: pass\n try: os.remove(name+'.o'); log.debug('removed '+name+'.o')\n except OSError: pass\n log.set_threshold(save_th)\n atexit.register(rm_file)\n return dummy_name\n\nis_f_file 
= re.compile(r'.*[.](for|ftn|f77|f)\\Z',re.I).match\n_has_f_header = re.compile(r'-[*]-\\s*fortran\\s*-[*]-',re.I).search\n_has_f90_header = re.compile(r'-[*]-\\s*f90\\s*-[*]-',re.I).search\n_has_fix_header = re.compile(r'-[*]-\\s*fix\\s*-[*]-',re.I).search\n_free_f90_start = re.compile(r'[^c*]\\s*[^\\s\\d\\t]',re.I).match\ndef is_free_format(file):\n \"\"\"Check if file is in free format Fortran.\"\"\"\n # f90 allows both fixed and free format, assuming fixed unless\n # signs of free format are detected.\n result = 0\n f = open(file,'r')\n line = f.readline()\n n = 15 # the number of non-comment lines to scan for hints\n if _has_f_header(line):\n n = 0\n elif _has_f90_header(line):\n n = 0\n result = 1\n while n>0 and line:\n if line[0]!='!':\n n -= 1\n if (line[0]!='\\t' and _free_f90_start(line[:5])) or line[-2:-1]=='&':\n result = 1\n break\n line = f.readline()\n f.close()\n return result\n\ndef has_f90_header(src):\n f = open(src,'r')\n line = f.readline()\n f.close()\n return _has_f90_header(line) or _has_fix_header(line)\nif __name__ == '__main__':\n show_fcompilers()\n", "methods": [ { "name": "get_version_cmd", "long_name": "get_version_cmd( self )", "filename": "fcompiler.py", "nloc": 16, "complexity": 6, "token_count": 96, "parameters": [ "self" ], "start_line": 191, "end_line": 207, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "get_linker_so", "long_name": "get_linker_so( self )", "filename": "fcompiler.py", "nloc": 16, "complexity": 6, "token_count": 96, "parameters": [ "self" ], "start_line": 209, "end_line": 225, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "get_flags", "long_name": "get_flags( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 227, "end_line": 229, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": 
"get_flags_version", "long_name": "get_flags_version( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 230, "end_line": 234, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_f77", "long_name": "get_flags_f77( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 235, "end_line": 239, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_f90", "long_name": "get_flags_f90( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 240, "end_line": 244, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_free", "long_name": "get_flags_free( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 245, "end_line": 247, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_flags_fix", "long_name": "get_flags_fix( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 248, "end_line": 252, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 253, "end_line": 257, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_ar", "long_name": "get_flags_ar( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 258, "end_line": 262, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, 
"length": 5, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 263, "end_line": 265, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_flags_arch", "long_name": "get_flags_arch( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 266, "end_line": 268, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 269, "end_line": 271, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 276, "end_line": 278, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_library_dirs", "long_name": "get_library_dirs( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 279, "end_line": 281, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "customize", "long_name": "customize( self , dist = None )", "filename": "fcompiler.py", "nloc": 89, "complexity": 37, "token_count": 827, "parameters": [ "self", "dist" ], "start_line": 287, "end_line": 405, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 119, "top_nesting_level": 1 }, { "name": "dump_properties", "long_name": "dump_properties( self )", "filename": "fcompiler.py", "nloc": 16, "complexity": 5, "token_count": 117, "parameters": [ "self" ], "start_line": 407, 
"end_line": 424, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 18, "top_nesting_level": 1 }, { "name": "_compile", "long_name": "_compile( self , obj , src , ext , cc_args , extra_postargs , pp_opts )", "filename": "fcompiler.py", "nloc": 32, "complexity": 9, "token_count": 217, "parameters": [ "self", "obj", "src", "ext", "cc_args", "extra_postargs", "pp_opts" ], "start_line": 428, "end_line": 464, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 37, "top_nesting_level": 1 }, { "name": "module_options", "long_name": "module_options( self , module_dirs , module_build_dir )", "filename": "fcompiler.py", "nloc": 17, "complexity": 5, "token_count": 129, "parameters": [ "self", "module_dirs", "module_build_dir" ], "start_line": 466, "end_line": 482, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "library_option", "long_name": "library_option( self , lib )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 11, "parameters": [ "self", "lib" ], "start_line": 484, "end_line": 485, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "library_dir_option", "long_name": "library_dir_option( self , dir )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 11, "parameters": [ "self", "dir" ], "start_line": 486, "end_line": 487, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_get_cc_args", "long_name": "_get_cc_args( self , pp_opts , debug , before )", "filename": "fcompiler.py", "nloc": 7, "complexity": 3, "token_count": 42, "parameters": [ "self", "pp_opts", "debug", "before" ], "start_line": 493, "end_line": 500, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 2 }, { "name": "compile", "long_name": "compile( self , sources , output_dir = None , macros = None , include_dirs = None , debug = 0 , extra_preargs = None , extra_postargs 
= None , depends = None )", "filename": "fcompiler.py", "nloc": 46, "complexity": 16, "token_count": 405, "parameters": [ "self", "sources", "output_dir", "macros", "include_dirs", "debug", "extra_preargs", "extra_postargs", "depends" ], "start_line": 502, "end_line": 551, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 50, "top_nesting_level": 2 }, { "name": "detect_language", "long_name": "detect_language( self , sources )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 8, "parameters": [ "self", "sources" ], "start_line": 552, "end_line": 553, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 2 }, { "name": "link", "long_name": "link( self , target_desc , objects , output_filename , output_dir = None , libraries = None , library_dirs = None , runtime_library_dirs = None , export_symbols = None , debug = 0 , extra_preargs = None , extra_postargs = None , build_temp = None , target_lang = None )", "filename": "fcompiler.py", "nloc": 45, "complexity": 12, "token_count": 331, "parameters": [ "self", "target_desc", "objects", "output_filename", "output_dir", "libraries", "library_dirs", "runtime_library_dirs", "export_symbols", "debug", "extra_preargs", "extra_postargs", "build_temp", "target_lang" ], "start_line": 555, "end_line": 602, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 48, "top_nesting_level": 1 }, { "name": "__get_cmd", "long_name": "__get_cmd( self , command , envvar = None , confvar = None )", "filename": "fcompiler.py", "nloc": 14, "complexity": 6, "token_count": 110, "parameters": [ "self", "command", "envvar", "confvar" ], "start_line": 608, "end_line": 621, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "__get_flags", "long_name": "__get_flags( self , command , envvar = None , confvar = None )", "filename": "fcompiler.py", "nloc": 14, "complexity": 6, "token_count": 120, "parameters": [ "self", "command", "envvar", 
"confvar" ], "start_line": 623, "end_line": 636, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "_find_existing_fcompiler", "long_name": "_find_existing_fcompiler( compilers , osname = None , platform = None )", "filename": "fcompiler.py", "nloc": 14, "complexity": 5, "token_count": 71, "parameters": [ "compilers", "osname", "platform" ], "start_line": 696, "end_line": 709, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 0 }, { "name": "get_default_fcompiler", "long_name": "get_default_fcompiler( osname = None , platform = None )", "filename": "fcompiler.py", "nloc": 21, "complexity": 9, "token_count": 135, "parameters": [ "osname", "platform" ], "start_line": 711, "end_line": 732, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 0 }, { "name": "new_fcompiler", "long_name": "new_fcompiler( plat = None , compiler = None , verbose = 0 , dry_run = 0 , force = 0 )", "filename": "fcompiler.py", "nloc": 34, "complexity": 7, "token_count": 182, "parameters": [ "plat", "compiler", "verbose", "dry_run", "force" ], "start_line": 734, "end_line": 771, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 38, "top_nesting_level": 0 }, { "name": "show_fcompilers", "long_name": "show_fcompilers( dist = None )", "filename": "fcompiler.py", "nloc": 41, "complexity": 8, "token_count": 261, "parameters": [ "dist" ], "start_line": 773, "end_line": 817, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 45, "top_nesting_level": 0 }, { "name": "dummy_fortran_file.rm_file", "long_name": "dummy_fortran_file.rm_file( name = dummy_name , log_threshold = log . _global_log . 
threshold )", "filename": "fcompiler.py", "nloc": 8, "complexity": 3, "token_count": 84, "parameters": [ "name", "log_threshold" ], "start_line": 825, "end_line": 832, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 1 }, { "name": "dummy_fortran_file", "long_name": "dummy_fortran_file( )", "filename": "fcompiler.py", "nloc": 9, "complexity": 1, "token_count": 46, "parameters": [], "start_line": 819, "end_line": 834, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 0 }, { "name": "is_free_format", "long_name": "is_free_format( file )", "filename": "fcompiler.py", "nloc": 19, "complexity": 9, "token_count": 114, "parameters": [ "file" ], "start_line": 841, "end_line": 862, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 0 }, { "name": "has_f90_header", "long_name": "has_f90_header( src )", "filename": "fcompiler.py", "nloc": 5, "complexity": 2, "token_count": 35, "parameters": [ "src" ], "start_line": 864, "end_line": 868, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 } ], "methods_before": [ { "name": "get_version_cmd", "long_name": "get_version_cmd( self )", "filename": "fcompiler.py", "nloc": 16, "complexity": 6, "token_count": 96, "parameters": [ "self" ], "start_line": 191, "end_line": 207, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "get_linker_so", "long_name": "get_linker_so( self )", "filename": "fcompiler.py", "nloc": 16, "complexity": 6, "token_count": 96, "parameters": [ "self" ], "start_line": 209, "end_line": 225, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "get_flags", "long_name": "get_flags( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 227, "end_line": 229, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, 
"top_nesting_level": 1 }, { "name": "get_flags_version", "long_name": "get_flags_version( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 230, "end_line": 234, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_f77", "long_name": "get_flags_f77( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 235, "end_line": 239, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_f90", "long_name": "get_flags_f90( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 240, "end_line": 244, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_free", "long_name": "get_flags_free( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 245, "end_line": 247, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_flags_fix", "long_name": "get_flags_fix( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 248, "end_line": 252, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_linker_so", "long_name": "get_flags_linker_so( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 253, "end_line": 257, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_ar", "long_name": "get_flags_ar( self )", "filename": "fcompiler.py", "nloc": 4, "complexity": 2, "token_count": 28, "parameters": [ "self" ], "start_line": 258, "end_line": 262, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 263, "end_line": 265, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_flags_arch", "long_name": "get_flags_arch( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 266, "end_line": 268, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 269, "end_line": 271, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 276, "end_line": 278, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "get_library_dirs", "long_name": "get_library_dirs( self )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 279, "end_line": 281, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "customize", "long_name": "customize( self , dist = None )", "filename": "fcompiler.py", "nloc": 89, "complexity": 37, "token_count": 827, "parameters": [ "self", "dist" ], "start_line": 287, "end_line": 405, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 119, "top_nesting_level": 1 }, { "name": "dump_properties", "long_name": "dump_properties( self )", "filename": "fcompiler.py", "nloc": 16, "complexity": 5, "token_count": 117, 
"parameters": [ "self" ], "start_line": 407, "end_line": 424, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 18, "top_nesting_level": 1 }, { "name": "_compile", "long_name": "_compile( self , obj , src , ext , cc_args , extra_postargs , pp_opts )", "filename": "fcompiler.py", "nloc": 32, "complexity": 9, "token_count": 217, "parameters": [ "self", "obj", "src", "ext", "cc_args", "extra_postargs", "pp_opts" ], "start_line": 428, "end_line": 464, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 37, "top_nesting_level": 1 }, { "name": "module_options", "long_name": "module_options( self , module_dirs , module_build_dir )", "filename": "fcompiler.py", "nloc": 17, "complexity": 5, "token_count": 129, "parameters": [ "self", "module_dirs", "module_build_dir" ], "start_line": 466, "end_line": 482, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "library_option", "long_name": "library_option( self , lib )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 11, "parameters": [ "self", "lib" ], "start_line": 484, "end_line": 485, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "library_dir_option", "long_name": "library_dir_option( self , dir )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 11, "parameters": [ "self", "dir" ], "start_line": 486, "end_line": 487, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "_get_cc_args", "long_name": "_get_cc_args( self , pp_opts , debug , before )", "filename": "fcompiler.py", "nloc": 7, "complexity": 3, "token_count": 42, "parameters": [ "self", "pp_opts", "debug", "before" ], "start_line": 493, "end_line": 500, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 2 }, { "name": "compile", "long_name": "compile( self , sources , output_dir = None , macros = None , include_dirs = None , 
debug = 0 , extra_preargs = None , extra_postargs = None , depends = None )", "filename": "fcompiler.py", "nloc": 46, "complexity": 16, "token_count": 405, "parameters": [ "self", "sources", "output_dir", "macros", "include_dirs", "debug", "extra_preargs", "extra_postargs", "depends" ], "start_line": 502, "end_line": 551, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 50, "top_nesting_level": 2 }, { "name": "detect_language", "long_name": "detect_language( self , sources )", "filename": "fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 8, "parameters": [ "self", "sources" ], "start_line": 552, "end_line": 553, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 2 }, { "name": "link", "long_name": "link( self , target_desc , objects , output_filename , output_dir = None , libraries = None , library_dirs = None , runtime_library_dirs = None , export_symbols = None , debug = 0 , extra_preargs = None , extra_postargs = None , build_temp = None , target_lang = None )", "filename": "fcompiler.py", "nloc": 45, "complexity": 12, "token_count": 331, "parameters": [ "self", "target_desc", "objects", "output_filename", "output_dir", "libraries", "library_dirs", "runtime_library_dirs", "export_symbols", "debug", "extra_preargs", "extra_postargs", "build_temp", "target_lang" ], "start_line": 555, "end_line": 602, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 48, "top_nesting_level": 1 }, { "name": "__get_cmd", "long_name": "__get_cmd( self , command , envvar = None , confvar = None )", "filename": "fcompiler.py", "nloc": 14, "complexity": 6, "token_count": 110, "parameters": [ "self", "command", "envvar", "confvar" ], "start_line": 608, "end_line": 621, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "__get_flags", "long_name": "__get_flags( self , command , envvar = None , confvar = None )", "filename": "fcompiler.py", "nloc": 14, "complexity": 6, "token_count": 
120, "parameters": [ "self", "command", "envvar", "confvar" ], "start_line": 623, "end_line": 636, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "_find_existing_fcompiler", "long_name": "_find_existing_fcompiler( compilers , osname = None , platform = None )", "filename": "fcompiler.py", "nloc": 14, "complexity": 5, "token_count": 71, "parameters": [ "compilers", "osname", "platform" ], "start_line": 694, "end_line": 707, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 0 }, { "name": "get_default_fcompiler", "long_name": "get_default_fcompiler( osname = None , platform = None )", "filename": "fcompiler.py", "nloc": 21, "complexity": 9, "token_count": 135, "parameters": [ "osname", "platform" ], "start_line": 709, "end_line": 730, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 0 }, { "name": "new_fcompiler", "long_name": "new_fcompiler( plat = None , compiler = None , verbose = 0 , dry_run = 0 , force = 0 )", "filename": "fcompiler.py", "nloc": 34, "complexity": 7, "token_count": 182, "parameters": [ "plat", "compiler", "verbose", "dry_run", "force" ], "start_line": 732, "end_line": 769, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 38, "top_nesting_level": 0 }, { "name": "show_fcompilers", "long_name": "show_fcompilers( dist = None )", "filename": "fcompiler.py", "nloc": 41, "complexity": 8, "token_count": 261, "parameters": [ "dist" ], "start_line": 771, "end_line": 815, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 45, "top_nesting_level": 0 }, { "name": "dummy_fortran_file.rm_file", "long_name": "dummy_fortran_file.rm_file( name = dummy_name , log_threshold = log . _global_log . 
threshold )", "filename": "fcompiler.py", "nloc": 8, "complexity": 3, "token_count": 84, "parameters": [ "name", "log_threshold" ], "start_line": 823, "end_line": 830, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 1 }, { "name": "dummy_fortran_file", "long_name": "dummy_fortran_file( )", "filename": "fcompiler.py", "nloc": 9, "complexity": 1, "token_count": 46, "parameters": [], "start_line": 817, "end_line": 832, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 0 }, { "name": "is_free_format", "long_name": "is_free_format( file )", "filename": "fcompiler.py", "nloc": 19, "complexity": 9, "token_count": 114, "parameters": [ "file" ], "start_line": 839, "end_line": 860, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 0 }, { "name": "has_f90_header", "long_name": "has_f90_header( src )", "filename": "fcompiler.py", "nloc": 5, "complexity": 2, "token_count": 35, "parameters": [ "src" ], "start_line": 862, "end_line": 866, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 } ], "changed_methods": [], "nloc": 655, "complexity": 177, "token_count": 4430, "diff_parsed": { "added": [ " 'g95':('g95fcompiler','G95FCompiler',", " \"GNU Fortran 95 Compiler\"),", " ('win32',('gnu','intelv','absoft','compaqv','intelev','g95')),", " ('cygwin.*',('gnu','intelv','absoft','compaqv','intelev','g95')),", " 'intele','g95')),", " ('darwin.*',('nag','absoft','ibm','gnu','g95')),", " ('sunos.*',('forte','gnu','sun','g95'))," ], "deleted": [ " ('win32',('gnu','intelv','absoft','compaqv','intelev')),", " ('cygwin.*',('gnu','intelv','absoft','compaqv','intelev')),", " 'intele')),", " ('darwin.*',('nag','absoft','ibm','gnu')),", " ('sunos.*',('forte','gnu','sun'))," ] } }, { "old_path": null, "new_path": "scipy_distutils/g95fcompiler.py", "filename": "g95fcompiler.py", "extension": "py", "change_type": "ADD", "diff": "@@ -0,0 +1,41 @@\n+# 
http://g95.sourceforge.net/\n+\n+import os\n+import sys\n+\n+from cpuinfo import cpu\n+from fcompiler import FCompiler\n+\n+class G95FCompiler(FCompiler):\n+\n+ compiler_type = 'g95'\n+ version_pattern = r'G95.*\\(experimental\\) \\(g95!\\) (?P.*)\\).*'\n+\n+ executables = {\n+ 'version_cmd' : [\"g95\", \"--version\"],\n+ 'compiler_f77' : [\"g95\", \"-ffixed-form\"],\n+ 'compiler_fix' : [\"g95\", \"-ffixed-form\"],\n+ 'compiler_f90' : [\"g95\"],\n+ 'linker_so' : [\"g95\",\"-shared\"],\n+ 'archiver' : [\"ar\", \"-cr\"],\n+ 'ranlib' : [\"ranlib\"]\n+ }\n+ pic_flags = ['-fpic']\n+ module_dir_switch = '-fmod='\n+ module_include_switch = '-I'\n+\n+ def get_flags(self):\n+ return ['-fno-second-underscore']\n+ def get_flags_opt(self):\n+ return ['-O']\n+ def get_flags_debug(self):\n+ return ['-g']\n+\n+if __name__ == '__main__':\n+ from distutils import log\n+ log.set_verbosity(2)\n+ from fcompiler import new_fcompiler\n+ #compiler = new_fcompiler(compiler='g95')\n+ compiler = G95FCompiler()\n+ compiler.customize()\n+ print compiler.get_version()\n", "added_lines": 41, "deleted_lines": 0, "source_code": "# http://g95.sourceforge.net/\n\nimport os\nimport sys\n\nfrom cpuinfo import cpu\nfrom fcompiler import FCompiler\n\nclass G95FCompiler(FCompiler):\n\n compiler_type = 'g95'\n version_pattern = r'G95.*\\(experimental\\) \\(g95!\\) (?P.*)\\).*'\n\n executables = {\n 'version_cmd' : [\"g95\", \"--version\"],\n 'compiler_f77' : [\"g95\", \"-ffixed-form\"],\n 'compiler_fix' : [\"g95\", \"-ffixed-form\"],\n 'compiler_f90' : [\"g95\"],\n 'linker_so' : [\"g95\",\"-shared\"],\n 'archiver' : [\"ar\", \"-cr\"],\n 'ranlib' : [\"ranlib\"]\n }\n pic_flags = ['-fpic']\n module_dir_switch = '-fmod='\n module_include_switch = '-I'\n\n def get_flags(self):\n return ['-fno-second-underscore']\n def get_flags_opt(self):\n return ['-O']\n def get_flags_debug(self):\n return ['-g']\n\nif __name__ == '__main__':\n from distutils import log\n log.set_verbosity(2)\n from fcompiler import 
new_fcompiler\n #compiler = new_fcompiler(compiler='g95')\n compiler = G95FCompiler()\n compiler.customize()\n print compiler.get_version()\n", "source_code_before": null, "methods": [ { "name": "get_flags", "long_name": "get_flags( self )", "filename": "g95fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 27, "end_line": 28, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "g95fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 29, "end_line": 30, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "g95fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 31, "end_line": 32, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 } ], "methods_before": [], "changed_methods": [ { "name": "get_flags_debug", "long_name": "get_flags_debug( self )", "filename": "g95fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 31, "end_line": 32, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags", "long_name": "get_flags( self )", "filename": "g95fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 27, "end_line": 28, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_flags_opt", "long_name": "get_flags_opt( self )", "filename": "g95fcompiler.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 29, "end_line": 30, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 } ], "nloc": 32, "complexity": 3, 
"token_count": 156, "diff_parsed": { "added": [ "# http://g95.sourceforge.net/", "", "import os", "import sys", "", "from cpuinfo import cpu", "from fcompiler import FCompiler", "", "class G95FCompiler(FCompiler):", "", " compiler_type = 'g95'", " version_pattern = r'G95.*\\(experimental\\) \\(g95!\\) (?P.*)\\).*'", "", " executables = {", " 'version_cmd' : [\"g95\", \"--version\"],", " 'compiler_f77' : [\"g95\", \"-ffixed-form\"],", " 'compiler_fix' : [\"g95\", \"-ffixed-form\"],", " 'compiler_f90' : [\"g95\"],", " 'linker_so' : [\"g95\",\"-shared\"],", " 'archiver' : [\"ar\", \"-cr\"],", " 'ranlib' : [\"ranlib\"]", " }", " pic_flags = ['-fpic']", " module_dir_switch = '-fmod='", " module_include_switch = '-I'", "", " def get_flags(self):", " return ['-fno-second-underscore']", " def get_flags_opt(self):", " return ['-O']", " def get_flags_debug(self):", " return ['-g']", "", "if __name__ == '__main__':", " from distutils import log", " log.set_verbosity(2)", " from fcompiler import new_fcompiler", " #compiler = new_fcompiler(compiler='g95')", " compiler = G95FCompiler()", " compiler.customize()", " print compiler.get_version()" ], "deleted": [] } } ] }, { "hash": "64fb0eeb6af126e9757435c90c5d9165dc783881", "msg": "Fixed bug in build_ext --backend support, expose also ext. 
module private names to backend wrapper.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-22T08:55:14+00:00", "author_timezone": 0, "committer_date": "2005-03-22T08:55:14+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "ebf0bc4e3e0fd00d0fd0c86faf2744004d4c4346" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 2, "insertions": 9, "lines": 11, "files": 1, "dmm_unit_size": 1.0, "dmm_unit_complexity": 1.0, "dmm_unit_interfacing": 0.0, "modified_files": [ { "old_path": "scipy_distutils/command/build_src.py", "new_path": "scipy_distutils/command/build_src.py", "filename": "build_src.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -41,7 +41,11 @@\n if _which[0] is None:\n _which = _backends[0], \"defaulted\"\n \n-exec \"from _\" + _which[0] + \".%(name)s import *\"\n+exec \"import _\" + _which[0] + \".%(name)s as ___m\"\n+for ___a in dir(___m):\n+ exec ___a + \" = getattr(___m,___a)\"\n+else:\n+ del ___a, ___m\n '''\n \n \n@@ -126,7 +130,10 @@ def backend_split(self):\n for ext in self.extensions:\n name = ext.name.split('.')[-1]\n fullname = self.get_ext_fullname(ext.name)\n- def func(extension, src_dir):\n+ def func(extension, src_dir,\n+ name=name,\n+ fullname=fullname,\n+ backends=backends):\n source = os.path.join(os.path.dirname(src_dir),name+'.py')\n if newer(__file__, source):\n f = open(source,'w')\n", "added_lines": 9, "deleted_lines": 2, "source_code": "\"\"\" Build swig, f2py, weave, sources.\n\"\"\"\n\nimport os\nimport re\n\nfrom distutils.cmd import Command\nfrom distutils.command import build_ext, build_py\nfrom distutils.util import convert_path\nfrom distutils.dep_util import newer_group, newer\n\nfrom scipy_distutils import log\nfrom scipy_distutils.misc_util import fortran_ext_match, all_strings, 
dot_join\nfrom scipy_distutils.from_template import process_file\nfrom scipy_distutils.extension import Extension\n\n_split_ext_template = '''\nimport os\nimport sys\n\n_which = None, None\n_backends = %(backends)r\nif hasattr(sys, \"argv\"):\n i = -1\n for a in sys.argv:\n i += 1\n if a.lower()[2:] in _backends: \n _which = a.lower()[2:], \"command line\"\n del sys.argv[i]\n os.environ[_which[0].upper()] = _which[0]\n break\n del a\n\nif _which[0] is None:\n for b in _backends:\n if os.environ.get(b.upper(),None):\n _which = b, \"environment var\"\n break\n del b\n\nif _which[0] is None:\n _which = _backends[0], \"defaulted\"\n\nexec \"import _\" + _which[0] + \".%(name)s as ___m\"\nfor ___a in dir(___m):\n exec ___a + \" = getattr(___m,___a)\"\nelse:\n del ___a, ___m\n'''\n\n\nclass build_src(build_ext.build_ext):\n\n description = \"build sources from SWIG, F2PY files or a function\"\n\n user_options = [\n ('build-src=', 'd', \"directory to \\\"build\\\" sources to\"),\n ('f2pyflags=', None, \"additonal flags to f2py\"),\n ('swigflags=', None, \"additional flags to swig\"),\n ('force', 'f', \"forcibly build everything (ignore file timestamps)\"),\n ('inplace', 'i',\n \"ignore build-lib and put compiled extensions into the source \" +\n \"directory alongside your pure Python modules\"),\n ]\n\n boolean_options = ['force','inplace']\n\n help_options = []\n\n def initialize_options(self):\n self.extensions = None\n self.package = None\n self.py_modules = None\n self.build_src = None\n self.build_lib = None\n self.build_base = None\n self.force = None\n self.inplace = None\n self.package_dir = None\n self.f2pyflags = None\n self.swigflags = None\n self.backends = None\n return\n\n def finalize_options(self):\n self.set_undefined_options('build',\n ('build_base', 'build_base'),\n ('build_lib', 'build_lib'),\n ('force', 'force'))\n if self.package is None:\n self.package = self.distribution.ext_package\n self.extensions = self.distribution.ext_modules\n self.libraries 
= self.distribution.libraries or []\n self.py_modules = self.distribution.py_modules\n if self.build_src is None:\n self.build_src = os.path.join(self.build_base, 'src')\n if self.inplace is None:\n build_ext = self.get_finalized_command('build_ext')\n self.inplace = build_ext.inplace\n if self.backends is None:\n build_ext = self.get_finalized_command('build_ext')\n self.backends = build_ext.backends\n\n # py_modules is used in build_py.find_package_modules\n self.py_modules = {}\n\n if self.f2pyflags is None:\n self.f2pyflags = []\n else:\n self.f2pyflags = self.f2pyflags.split() # XXX spaces??\n\n if self.swigflags is None:\n self.swigflags = []\n else:\n self.swigflags = self.swigflags.split() # XXX spaces??\n return\n\n def run(self):\n if not (self.extensions or self.libraries):\n return\n if self.backends is not None:\n self.backend_split()\n self.build_sources()\n return\n\n def backend_split(self):\n log.info('splitting extensions for backends: %s' % (self.backends))\n extensions = []\n backends = self.backends.split(',')\n for ext in self.extensions:\n name = ext.name.split('.')[-1]\n fullname = self.get_ext_fullname(ext.name)\n def func(extension, src_dir,\n name=name,\n fullname=fullname,\n backends=backends):\n source = os.path.join(os.path.dirname(src_dir),name+'.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.write(_split_ext_template \\\n % {'name':name,'fullname':fullname,\n 'backends':backends})\n f.close()\n return [ source ]\n def func_init(extension, src_dir):\n source = os.path.join(src_dir,'__init__.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.close()\n return [source]\n for b in backends:\n new_ext = self.split_extension(ext,b)\n new_ext.sources.append(func_init)\n extensions.append(new_ext)\n\n new_package = dot_join(*(ext.name.split('.')[:-1]+['_'+b]))\n new_package_dir = os.path.join(*([self.build_src]+ext.name.split('.')[:-1]+['_'+b]))\n if new_package not in self.distribution.packages:\n 
self.distribution.packages.append(new_package)\n self.distribution.package_dir[new_package] = new_package_dir\n\n ext.sources = [func]\n extensions.append(ext)\n self.extensions[:] = extensions\n\n def split_extension(self, ext, backend):\n fullname = self.get_ext_fullname(ext.name)\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n name = modpath[-1]\n macros = []\n macros.append((backend.upper(),None))\n new_ext = Extension(name = dot_join(package,'_%s.%s' % (backend,name)),\n sources = ext.sources,\n include_dirs = ext.include_dirs,\n define_macros = ext.define_macros + macros,\n undef_macros = ext.undef_macros,\n library_dirs = ext.library_dirs,\n libraries = ext.libraries,\n runtime_library_dirs = ext.runtime_library_dirs,\n extra_objects = ext.extra_objects,\n extra_compile_args = ext.extra_compile_args,\n extra_link_args = ext.extra_link_args,\n export_symbols = ext.export_symbols,\n depends = ext.depends,\n language = ext.language,\n f2py_options = ext.f2py_options,\n module_dirs = ext.module_dirs\n )\n new_ext.backend = backend\n return new_ext\n \n def build_sources(self):\n self.check_extensions_list(self.extensions)\n\n for ext in self.extensions:\n self.build_extension_sources(ext)\n\n for libname_info in self.libraries:\n self.build_library_sources(*libname_info)\n\n return\n\n def build_library_sources(self, lib_name, build_info):\n sources = list(build_info.get('sources',[]))\n\n if not sources:\n return\n\n log.info('building library \"%s\" sources' % (lib_name))\n\n sources = self.generate_sources(sources, (lib_name, build_info))\n\n build_info['sources'] = sources\n return\n\n def build_extension_sources(self, ext):\n sources = list(ext.sources)\n\n log.info('building extension \"%s\" sources' % (ext.name))\n\n fullname = self.get_ext_fullname(ext.name)\n\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n\n if self.inplace:\n build_py = self.get_finalized_command('build_py')\n self.ext_target_dir = 
build_py.get_package_dir(package)\n\n sources = self.generate_sources(sources, ext)\n\n sources = self.template_sources(sources, ext)\n \n sources = self.swig_sources(sources, ext)\n\n sources = self.f2py_sources(sources, ext)\n\n sources, py_files = self.filter_py_files(sources)\n\n if not self.py_modules.has_key(package):\n self.py_modules[package] = []\n modules = []\n for f in py_files:\n module = os.path.splitext(os.path.basename(f))[0]\n modules.append((package, module, f))\n self.py_modules[package] += modules\n\n ext.sources = sources\n return\n\n def generate_sources(self, sources, extension):\n new_sources = []\n func_sources = []\n for source in sources:\n if type(source) is type(''):\n new_sources.append(source)\n else:\n func_sources.append(source)\n if not func_sources:\n return new_sources\n if self.inplace:\n build_dir = self.ext_target_dir\n else:\n if type(extension) is type(()):\n name = extension[0]\n else:\n name = extension.name\n build_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n self.mkpath(build_dir)\n for func in func_sources:\n source = func(extension, build_dir)\n if type(source) is type([]):\n [log.info(\" adding '%s' to sources.\" % (s)) for s in source]\n new_sources.extend(source)\n else:\n log.info(\" adding '%s' to sources.\" % (source))\n new_sources.append(source)\n return new_sources\n\n def filter_py_files(self, sources):\n new_sources = []\n py_files = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext=='.py': \n py_files.append(source)\n else:\n new_sources.append(source)\n return new_sources, py_files\n\n def template_sources(self, sources, extension):\n new_sources = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.src': # Template file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n self.mkpath(target_dir)\n target_file = 
os.path.join(target_dir,os.path.basename(base))\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file)):\n log.info(\"from_template:> %s\" % (target_file))\n outstr = process_file(source)\n fid = open(target_file,'w')\n fid.write(outstr)\n fid.close()\n new_sources.append(target_file)\n else:\n new_sources.append(source)\n return new_sources \n \n def f2py_sources(self, sources, extension):\n new_sources = []\n f2py_sources = []\n f_sources = []\n f2py_targets = {}\n target_dirs = []\n ext_name = extension.name.split('.')[-1]\n skip_f2py = 0\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.pyf': # F2PY interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n if os.path.isfile(source):\n name = get_f2py_modulename(source)\n assert name==ext_name,'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name`\n target_file = os.path.join(target_dir,name+'module.c')\n else:\n log.debug(' source %s does not exist: skipping f2py\\'ing.' \\\n % (source))\n name = ext_name\n skip_f2py = 1\n target_file = os.path.join(target_dir,name+'module.c')\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %smodule.c was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = os.path.join(target_dir,name+'module.c')\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' 
\\\n % (target_file))\n target_dirs.append(target_dir)\n f2py_sources.append(source)\n f2py_targets[source] = target_file\n new_sources.append(target_file)\n elif fortran_ext_match(ext):\n f_sources.append(source)\n else:\n new_sources.append(source)\n\n if not (f2py_sources or f_sources):\n return new_sources\n\n map(self.mkpath, target_dirs)\n\n f2py_options = extension.f2py_options + self.f2pyflags\n if f2py_sources:\n assert len(f2py_sources)==1,\\\n 'only one .pyf file is allowed per extension module but got'\\\n ' more:'+`f2py_sources`\n source = f2py_sources[0]\n target_file = f2py_targets[source]\n target_dir = os.path.dirname(target_file) or '.'\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file,'newer')) \\\n and not skip_f2py:\n log.info(\"f2py: %s\" % (source))\n import f2py2e\n f2py2e.run_main(f2py_options + ['--build-dir',target_dir,source])\n else:\n log.debug(\" skipping '%s' f2py interface (up-to-date)\" % (source))\n else:\n #XXX TODO: --inplace support for sdist command\n if type(extension) is type(()): name = extension[0]\n else: name = extension.name\n target_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n target_file = os.path.join(target_dir,ext_name + 'module.c')\n new_sources.append(target_file)\n depends = f_sources + extension.depends\n if (self.force or newer_group(depends, target_file, 'newer')) \\\n and not skip_f2py:\n import f2py2e\n log.info(\"f2py:> %s\" % (target_file))\n self.mkpath(target_dir)\n f2py2e.run_main(f2py_options + ['--lower',\n '--build-dir',target_dir]+\\\n ['-m',ext_name]+f_sources)\n else:\n log.debug(\" skipping f2py fortran files for '%s' (up-to-date)\"\\\n % (target_file))\n\n assert os.path.isfile(target_file),`target_file`+' missing'\n\n target_c = os.path.join(self.build_src,'fortranobject.c')\n target_h = os.path.join(self.build_src,'fortranobject.h')\n log.info(\" adding '%s' to sources.\" % (target_c))\n new_sources.append(target_c)\n if 
self.build_src not in extension.include_dirs:\n log.info(\" adding '%s' to include_dirs.\" \\\n % (self.build_src))\n extension.include_dirs.append(self.build_src)\n\n if not skip_f2py:\n import f2py2e\n d = os.path.dirname(f2py2e.__file__)\n source_c = os.path.join(d,'src','fortranobject.c')\n source_h = os.path.join(d,'src','fortranobject.h')\n if newer(source_c,target_c) or newer(source_h,target_h):\n self.mkpath(os.path.dirname(target_c))\n self.copy_file(source_c,target_c)\n self.copy_file(source_h,target_h)\n else:\n assert os.path.isfile(target_c),`target_c` + ' missing'\n assert os.path.isfile(target_h),`target_h` + ' missing'\n \n for name_ext in ['-f2pywrappers.f','-f2pywrappers2.f90']:\n filename = os.path.join(target_dir,ext_name + name_ext)\n if os.path.isfile(filename):\n log.info(\" adding '%s' to sources.\" % (filename))\n f_sources.append(filename)\n\n return new_sources + f_sources\n\n def swig_sources(self, sources, extension):\n # Assuming SWIG 1.3.14 or later. See compatibility note in\n # http://www.swig.org/Doc1.3/Python.html#Python_nn6\n\n new_sources = []\n swig_sources = []\n swig_targets = {}\n target_dirs = []\n py_files = [] # swig generated .py files\n target_ext = '.c'\n typ = None\n is_cpp = 0\n skip_swig = 0\n ext_name = extension.name.split('.')[-1]\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.i': # SWIG interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n py_target_dir = self.ext_target_dir\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n py_target_dir = target_dir\n if os.path.isfile(source):\n name = get_swig_modulename(source)\n assert name==ext_name[1:],'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name[1:]`\n if typ is None:\n typ = get_swig_target(source)\n is_cpp = typ=='c++'\n if is_cpp:\n target_ext = '.cpp'\n else:\n assert typ == get_swig_target(source),`typ`\n target_file = 
os.path.join(target_dir,'%s_wrap%s' \\\n % (name, target_ext))\n else:\n log.debug(' source %s does not exist: skipping swig\\'ing.' \\\n % (source))\n name = ext_name[1:]\n skip_swig = 1\n target_file = _find_swig_target(target_dir, name)\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %s_wrap.{c,cpp} was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = _find_swig_target(target_dir, name)\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' \\\n % (target_file))\n target_dirs.append(target_dir)\n new_sources.append(target_file)\n py_files.append(os.path.join(py_target_dir, name+'.py'))\n swig_sources.append(source)\n swig_targets[source] = new_sources[-1]\n else:\n new_sources.append(source)\n\n if not swig_sources:\n return new_sources\n\n if skip_swig:\n return new_sources + py_files\n\n map(self.mkpath, target_dirs)\n swig = self.find_swig()\n swig_cmd = [swig, \"-python\"]\n if is_cpp:\n swig_cmd.append('-c++')\n for d in extension.include_dirs:\n swig_cmd.append('-I'+d)\n for source in swig_sources:\n target = swig_targets[source]\n depends = [source] + extension.depends\n if self.force or newer_group(depends, target, 'newer'):\n log.info(\"%s: %s\" % (os.path.basename(swig) \\\n + (is_cpp and '++' or ''), source))\n self.spawn(swig_cmd + self.swigflags \\\n + [\"-o\", target, '-outdir', py_target_dir, source])\n else:\n log.debug(\" skipping '%s' swig interface (up-to-date)\" \\\n % (source))\n\n return new_sources + py_files\n\ndef appendpath(prefix,path):\n if os.path.isabs(path):\n absprefix = os.path.abspath(prefix)\n d = os.path.commonprefix([absprefix,path])\n subpath = path[len(d):]\n assert not os.path.isabs(subpath),`subpath`\n return os.path.normpath(os.path.join(prefix,subpath))\n return os.path.normpath(os.path.join(prefix, path))\n\n#### SWIG related 
auxiliary functions ####\n_swig_module_name_match = re.compile(r'\\s*%module\\s*(?P[\\w_]+)',\n re.I).match\n_has_c_header = re.compile(r'-[*]-\\s*c\\s*-[*]-',re.I).search\n_has_cpp_header = re.compile(r'-[*]-\\s*c[+][+]\\s*-[*]-',re.I).search\n\ndef get_swig_target(source):\n f = open(source,'r')\n result = 'c'\n line = f.readline()\n if _has_cpp_header(line):\n result = 'c++'\n if _has_c_header(line):\n result = 'c'\n f.close()\n return result\n\ndef get_swig_modulename(source):\n f = open(source,'r')\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _swig_module_name_match(line)\n if m:\n name = m.group('name')\n break\n f.close()\n return name\n\ndef _find_swig_target(target_dir,name):\n for ext in ['.cpp','.c']:\n target = os.path.join(target_dir,'%s_wrap%s' % (name, ext))\n if os.path.isfile(target):\n break\n return target\n\n#### F2PY related auxiliary functions ####\n\n_f2py_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]+)',\n re.I).match\n_f2py_user_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]*?'\\\n '__user__[\\w_]*)',re.I).match\n\ndef get_f2py_modulename(source):\n name = None\n f = open(source)\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _f2py_module_name_match(line)\n if m:\n if _f2py_user_module_name_match(line): # skip *__user__* names\n continue\n name = m.group('name')\n break\n f.close()\n return name\n\n##########################################\n", "source_code_before": "\"\"\" Build swig, f2py, weave, sources.\n\"\"\"\n\nimport os\nimport re\n\nfrom distutils.cmd import Command\nfrom distutils.command import build_ext, build_py\nfrom distutils.util import convert_path\nfrom distutils.dep_util import newer_group, newer\n\nfrom scipy_distutils import log\nfrom scipy_distutils.misc_util import fortran_ext_match, all_strings, dot_join\nfrom scipy_distutils.from_template import process_file\nfrom scipy_distutils.extension 
import Extension\n\n_split_ext_template = '''\nimport os\nimport sys\n\n_which = None, None\n_backends = %(backends)r\nif hasattr(sys, \"argv\"):\n i = -1\n for a in sys.argv:\n i += 1\n if a.lower()[2:] in _backends: \n _which = a.lower()[2:], \"command line\"\n del sys.argv[i]\n os.environ[_which[0].upper()] = _which[0]\n break\n del a\n\nif _which[0] is None:\n for b in _backends:\n if os.environ.get(b.upper(),None):\n _which = b, \"environment var\"\n break\n del b\n\nif _which[0] is None:\n _which = _backends[0], \"defaulted\"\n\nexec \"from _\" + _which[0] + \".%(name)s import *\"\n'''\n\n\nclass build_src(build_ext.build_ext):\n\n description = \"build sources from SWIG, F2PY files or a function\"\n\n user_options = [\n ('build-src=', 'd', \"directory to \\\"build\\\" sources to\"),\n ('f2pyflags=', None, \"additonal flags to f2py\"),\n ('swigflags=', None, \"additional flags to swig\"),\n ('force', 'f', \"forcibly build everything (ignore file timestamps)\"),\n ('inplace', 'i',\n \"ignore build-lib and put compiled extensions into the source \" +\n \"directory alongside your pure Python modules\"),\n ]\n\n boolean_options = ['force','inplace']\n\n help_options = []\n\n def initialize_options(self):\n self.extensions = None\n self.package = None\n self.py_modules = None\n self.build_src = None\n self.build_lib = None\n self.build_base = None\n self.force = None\n self.inplace = None\n self.package_dir = None\n self.f2pyflags = None\n self.swigflags = None\n self.backends = None\n return\n\n def finalize_options(self):\n self.set_undefined_options('build',\n ('build_base', 'build_base'),\n ('build_lib', 'build_lib'),\n ('force', 'force'))\n if self.package is None:\n self.package = self.distribution.ext_package\n self.extensions = self.distribution.ext_modules\n self.libraries = self.distribution.libraries or []\n self.py_modules = self.distribution.py_modules\n if self.build_src is None:\n self.build_src = os.path.join(self.build_base, 'src')\n if 
self.inplace is None:\n build_ext = self.get_finalized_command('build_ext')\n self.inplace = build_ext.inplace\n if self.backends is None:\n build_ext = self.get_finalized_command('build_ext')\n self.backends = build_ext.backends\n\n # py_modules is used in build_py.find_package_modules\n self.py_modules = {}\n\n if self.f2pyflags is None:\n self.f2pyflags = []\n else:\n self.f2pyflags = self.f2pyflags.split() # XXX spaces??\n\n if self.swigflags is None:\n self.swigflags = []\n else:\n self.swigflags = self.swigflags.split() # XXX spaces??\n return\n\n def run(self):\n if not (self.extensions or self.libraries):\n return\n if self.backends is not None:\n self.backend_split()\n self.build_sources()\n return\n\n def backend_split(self):\n log.info('splitting extensions for backends: %s' % (self.backends))\n extensions = []\n backends = self.backends.split(',')\n for ext in self.extensions:\n name = ext.name.split('.')[-1]\n fullname = self.get_ext_fullname(ext.name)\n def func(extension, src_dir):\n source = os.path.join(os.path.dirname(src_dir),name+'.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.write(_split_ext_template \\\n % {'name':name,'fullname':fullname,\n 'backends':backends})\n f.close()\n return [ source ]\n def func_init(extension, src_dir):\n source = os.path.join(src_dir,'__init__.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.close()\n return [source]\n for b in backends:\n new_ext = self.split_extension(ext,b)\n new_ext.sources.append(func_init)\n extensions.append(new_ext)\n\n new_package = dot_join(*(ext.name.split('.')[:-1]+['_'+b]))\n new_package_dir = os.path.join(*([self.build_src]+ext.name.split('.')[:-1]+['_'+b]))\n if new_package not in self.distribution.packages:\n self.distribution.packages.append(new_package)\n self.distribution.package_dir[new_package] = new_package_dir\n\n ext.sources = [func]\n extensions.append(ext)\n self.extensions[:] = extensions\n\n def split_extension(self, ext, backend):\n 
fullname = self.get_ext_fullname(ext.name)\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n name = modpath[-1]\n macros = []\n macros.append((backend.upper(),None))\n new_ext = Extension(name = dot_join(package,'_%s.%s' % (backend,name)),\n sources = ext.sources,\n include_dirs = ext.include_dirs,\n define_macros = ext.define_macros + macros,\n undef_macros = ext.undef_macros,\n library_dirs = ext.library_dirs,\n libraries = ext.libraries,\n runtime_library_dirs = ext.runtime_library_dirs,\n extra_objects = ext.extra_objects,\n extra_compile_args = ext.extra_compile_args,\n extra_link_args = ext.extra_link_args,\n export_symbols = ext.export_symbols,\n depends = ext.depends,\n language = ext.language,\n f2py_options = ext.f2py_options,\n module_dirs = ext.module_dirs\n )\n new_ext.backend = backend\n return new_ext\n \n def build_sources(self):\n self.check_extensions_list(self.extensions)\n\n for ext in self.extensions:\n self.build_extension_sources(ext)\n\n for libname_info in self.libraries:\n self.build_library_sources(*libname_info)\n\n return\n\n def build_library_sources(self, lib_name, build_info):\n sources = list(build_info.get('sources',[]))\n\n if not sources:\n return\n\n log.info('building library \"%s\" sources' % (lib_name))\n\n sources = self.generate_sources(sources, (lib_name, build_info))\n\n build_info['sources'] = sources\n return\n\n def build_extension_sources(self, ext):\n sources = list(ext.sources)\n\n log.info('building extension \"%s\" sources' % (ext.name))\n\n fullname = self.get_ext_fullname(ext.name)\n\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n\n if self.inplace:\n build_py = self.get_finalized_command('build_py')\n self.ext_target_dir = build_py.get_package_dir(package)\n\n sources = self.generate_sources(sources, ext)\n\n sources = self.template_sources(sources, ext)\n \n sources = self.swig_sources(sources, ext)\n\n sources = self.f2py_sources(sources, ext)\n\n sources, py_files = 
self.filter_py_files(sources)\n\n if not self.py_modules.has_key(package):\n self.py_modules[package] = []\n modules = []\n for f in py_files:\n module = os.path.splitext(os.path.basename(f))[0]\n modules.append((package, module, f))\n self.py_modules[package] += modules\n\n ext.sources = sources\n return\n\n def generate_sources(self, sources, extension):\n new_sources = []\n func_sources = []\n for source in sources:\n if type(source) is type(''):\n new_sources.append(source)\n else:\n func_sources.append(source)\n if not func_sources:\n return new_sources\n if self.inplace:\n build_dir = self.ext_target_dir\n else:\n if type(extension) is type(()):\n name = extension[0]\n else:\n name = extension.name\n build_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n self.mkpath(build_dir)\n for func in func_sources:\n source = func(extension, build_dir)\n if type(source) is type([]):\n [log.info(\" adding '%s' to sources.\" % (s)) for s in source]\n new_sources.extend(source)\n else:\n log.info(\" adding '%s' to sources.\" % (source))\n new_sources.append(source)\n return new_sources\n\n def filter_py_files(self, sources):\n new_sources = []\n py_files = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext=='.py': \n py_files.append(source)\n else:\n new_sources.append(source)\n return new_sources, py_files\n\n def template_sources(self, sources, extension):\n new_sources = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.src': # Template file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n self.mkpath(target_dir)\n target_file = os.path.join(target_dir,os.path.basename(base))\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file)):\n log.info(\"from_template:> %s\" % (target_file))\n outstr = process_file(source)\n fid = open(target_file,'w')\n fid.write(outstr)\n 
fid.close()\n new_sources.append(target_file)\n else:\n new_sources.append(source)\n return new_sources \n \n def f2py_sources(self, sources, extension):\n new_sources = []\n f2py_sources = []\n f_sources = []\n f2py_targets = {}\n target_dirs = []\n ext_name = extension.name.split('.')[-1]\n skip_f2py = 0\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.pyf': # F2PY interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n if os.path.isfile(source):\n name = get_f2py_modulename(source)\n assert name==ext_name,'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name`\n target_file = os.path.join(target_dir,name+'module.c')\n else:\n log.debug(' source %s does not exist: skipping f2py\\'ing.' \\\n % (source))\n name = ext_name\n skip_f2py = 1\n target_file = os.path.join(target_dir,name+'module.c')\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %smodule.c was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = os.path.join(target_dir,name+'module.c')\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' 
\\\n % (target_file))\n target_dirs.append(target_dir)\n f2py_sources.append(source)\n f2py_targets[source] = target_file\n new_sources.append(target_file)\n elif fortran_ext_match(ext):\n f_sources.append(source)\n else:\n new_sources.append(source)\n\n if not (f2py_sources or f_sources):\n return new_sources\n\n map(self.mkpath, target_dirs)\n\n f2py_options = extension.f2py_options + self.f2pyflags\n if f2py_sources:\n assert len(f2py_sources)==1,\\\n 'only one .pyf file is allowed per extension module but got'\\\n ' more:'+`f2py_sources`\n source = f2py_sources[0]\n target_file = f2py_targets[source]\n target_dir = os.path.dirname(target_file) or '.'\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file,'newer')) \\\n and not skip_f2py:\n log.info(\"f2py: %s\" % (source))\n import f2py2e\n f2py2e.run_main(f2py_options + ['--build-dir',target_dir,source])\n else:\n log.debug(\" skipping '%s' f2py interface (up-to-date)\" % (source))\n else:\n #XXX TODO: --inplace support for sdist command\n if type(extension) is type(()): name = extension[0]\n else: name = extension.name\n target_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n target_file = os.path.join(target_dir,ext_name + 'module.c')\n new_sources.append(target_file)\n depends = f_sources + extension.depends\n if (self.force or newer_group(depends, target_file, 'newer')) \\\n and not skip_f2py:\n import f2py2e\n log.info(\"f2py:> %s\" % (target_file))\n self.mkpath(target_dir)\n f2py2e.run_main(f2py_options + ['--lower',\n '--build-dir',target_dir]+\\\n ['-m',ext_name]+f_sources)\n else:\n log.debug(\" skipping f2py fortran files for '%s' (up-to-date)\"\\\n % (target_file))\n\n assert os.path.isfile(target_file),`target_file`+' missing'\n\n target_c = os.path.join(self.build_src,'fortranobject.c')\n target_h = os.path.join(self.build_src,'fortranobject.h')\n log.info(\" adding '%s' to sources.\" % (target_c))\n new_sources.append(target_c)\n if 
self.build_src not in extension.include_dirs:\n log.info(\" adding '%s' to include_dirs.\" \\\n % (self.build_src))\n extension.include_dirs.append(self.build_src)\n\n if not skip_f2py:\n import f2py2e\n d = os.path.dirname(f2py2e.__file__)\n source_c = os.path.join(d,'src','fortranobject.c')\n source_h = os.path.join(d,'src','fortranobject.h')\n if newer(source_c,target_c) or newer(source_h,target_h):\n self.mkpath(os.path.dirname(target_c))\n self.copy_file(source_c,target_c)\n self.copy_file(source_h,target_h)\n else:\n assert os.path.isfile(target_c),`target_c` + ' missing'\n assert os.path.isfile(target_h),`target_h` + ' missing'\n \n for name_ext in ['-f2pywrappers.f','-f2pywrappers2.f90']:\n filename = os.path.join(target_dir,ext_name + name_ext)\n if os.path.isfile(filename):\n log.info(\" adding '%s' to sources.\" % (filename))\n f_sources.append(filename)\n\n return new_sources + f_sources\n\n def swig_sources(self, sources, extension):\n # Assuming SWIG 1.3.14 or later. See compatibility note in\n # http://www.swig.org/Doc1.3/Python.html#Python_nn6\n\n new_sources = []\n swig_sources = []\n swig_targets = {}\n target_dirs = []\n py_files = [] # swig generated .py files\n target_ext = '.c'\n typ = None\n is_cpp = 0\n skip_swig = 0\n ext_name = extension.name.split('.')[-1]\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.i': # SWIG interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n py_target_dir = self.ext_target_dir\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n py_target_dir = target_dir\n if os.path.isfile(source):\n name = get_swig_modulename(source)\n assert name==ext_name[1:],'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name[1:]`\n if typ is None:\n typ = get_swig_target(source)\n is_cpp = typ=='c++'\n if is_cpp:\n target_ext = '.cpp'\n else:\n assert typ == get_swig_target(source),`typ`\n target_file = 
os.path.join(target_dir,'%s_wrap%s' \\\n % (name, target_ext))\n else:\n log.debug(' source %s does not exist: skipping swig\\'ing.' \\\n % (source))\n name = ext_name[1:]\n skip_swig = 1\n target_file = _find_swig_target(target_dir, name)\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %s_wrap.{c,cpp} was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = _find_swig_target(target_dir, name)\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' \\\n % (target_file))\n target_dirs.append(target_dir)\n new_sources.append(target_file)\n py_files.append(os.path.join(py_target_dir, name+'.py'))\n swig_sources.append(source)\n swig_targets[source] = new_sources[-1]\n else:\n new_sources.append(source)\n\n if not swig_sources:\n return new_sources\n\n if skip_swig:\n return new_sources + py_files\n\n map(self.mkpath, target_dirs)\n swig = self.find_swig()\n swig_cmd = [swig, \"-python\"]\n if is_cpp:\n swig_cmd.append('-c++')\n for d in extension.include_dirs:\n swig_cmd.append('-I'+d)\n for source in swig_sources:\n target = swig_targets[source]\n depends = [source] + extension.depends\n if self.force or newer_group(depends, target, 'newer'):\n log.info(\"%s: %s\" % (os.path.basename(swig) \\\n + (is_cpp and '++' or ''), source))\n self.spawn(swig_cmd + self.swigflags \\\n + [\"-o\", target, '-outdir', py_target_dir, source])\n else:\n log.debug(\" skipping '%s' swig interface (up-to-date)\" \\\n % (source))\n\n return new_sources + py_files\n\ndef appendpath(prefix,path):\n if os.path.isabs(path):\n absprefix = os.path.abspath(prefix)\n d = os.path.commonprefix([absprefix,path])\n subpath = path[len(d):]\n assert not os.path.isabs(subpath),`subpath`\n return os.path.normpath(os.path.join(prefix,subpath))\n return os.path.normpath(os.path.join(prefix, path))\n\n#### SWIG related 
auxiliary functions ####\n_swig_module_name_match = re.compile(r'\\s*%module\\s*(?P[\\w_]+)',\n re.I).match\n_has_c_header = re.compile(r'-[*]-\\s*c\\s*-[*]-',re.I).search\n_has_cpp_header = re.compile(r'-[*]-\\s*c[+][+]\\s*-[*]-',re.I).search\n\ndef get_swig_target(source):\n f = open(source,'r')\n result = 'c'\n line = f.readline()\n if _has_cpp_header(line):\n result = 'c++'\n if _has_c_header(line):\n result = 'c'\n f.close()\n return result\n\ndef get_swig_modulename(source):\n f = open(source,'r')\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _swig_module_name_match(line)\n if m:\n name = m.group('name')\n break\n f.close()\n return name\n\ndef _find_swig_target(target_dir,name):\n for ext in ['.cpp','.c']:\n target = os.path.join(target_dir,'%s_wrap%s' % (name, ext))\n if os.path.isfile(target):\n break\n return target\n\n#### F2PY related auxiliary functions ####\n\n_f2py_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]+)',\n re.I).match\n_f2py_user_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]*?'\\\n '__user__[\\w_]*)',re.I).match\n\ndef get_f2py_modulename(source):\n name = None\n f = open(source)\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _f2py_module_name_match(line)\n if m:\n if _f2py_user_module_name_match(line): # skip *__user__* names\n continue\n name = m.group('name')\n break\n f.close()\n return name\n\n##########################################\n", "methods": [ { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_src.py", "nloc": 14, "complexity": 1, "token_count": 66, "parameters": [ "self" ], "start_line": 70, "end_line": 83, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_src.py", "nloc": 28, "complexity": 8, "token_count": 201, "parameters": [ "self" 
], "start_line": 85, "end_line": 116, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_src.py", "nloc": 7, "complexity": 4, "token_count": 37, "parameters": [ "self" ], "start_line": 118, "end_line": 124, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "backend_split.func", "long_name": "backend_split.func( extension , src_dir , name = name , fullname = fullname , backends = backends )", "filename": "build_src.py", "nloc": 12, "complexity": 2, "token_count": 86, "parameters": [ "extension", "src_dir", "name", "fullname", "backends" ], "start_line": 133, "end_line": 144, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 3 }, { "name": "backend_split.func_init", "long_name": "backend_split.func_init( extension , src_dir )", "filename": "build_src.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "extension", "src_dir" ], "start_line": 145, "end_line": 150, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 3 }, { "name": "backend_split", "long_name": "backend_split( self )", "filename": "build_src.py", "nloc": 21, "complexity": 4, "token_count": 210, "parameters": [ "self" ], "start_line": 126, "end_line": 164, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 39, "top_nesting_level": 1 }, { "name": "split_extension", "long_name": "split_extension( self , ext , backend )", "filename": "build_src.py", "nloc": 26, "complexity": 1, "token_count": 184, "parameters": [ "self", "ext", "backend" ], "start_line": 166, "end_line": 191, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "build_sources", "long_name": "build_sources( self )", "filename": "build_src.py", "nloc": 7, "complexity": 3, "token_count": 41, "parameters": [ "self" ], "start_line": 193, "end_line": 202, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "build_library_sources", "long_name": "build_library_sources( self , lib_name , build_info )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 59, "parameters": [ "self", "lib_name", "build_info" ], "start_line": 204, "end_line": 215, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "build_extension_sources", "long_name": "build_extension_sources( self , ext )", "filename": "build_src.py", "nloc": 23, "complexity": 4, "token_count": 207, "parameters": [ "self", "ext" ], "start_line": 217, "end_line": 250, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 34, "top_nesting_level": 1 }, { "name": "generate_sources", "long_name": "generate_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 29, "complexity": 9, "token_count": 193, "parameters": [ "self", "sources", "extension" ], "start_line": 252, "end_line": 280, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "filter_py_files", "long_name": "filter_py_files( self , sources )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "self", "sources" ], "start_line": 282, "end_line": 291, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "template_sources", "long_name": "template_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 22, "complexity": 6, "token_count": 170, "parameters": [ "self", "sources", "extension" ], "start_line": 293, "end_line": 314, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "f2py_sources", "long_name": "f2py_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 110, "complexity": 24, "token_count": 874, "parameters": [ "self", "sources", "extension" ], "start_line": 316, 
"end_line": 435, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 120, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 80, "complexity": 17, "token_count": 539, "parameters": [ "self", "sources", "extension" ], "start_line": 437, "end_line": 524, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 88, "top_nesting_level": 1 }, { "name": "appendpath", "long_name": "appendpath( prefix , path )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 101, "parameters": [ "prefix", "path" ], "start_line": 526, "end_line": 533, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "get_swig_target", "long_name": "get_swig_target( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 48, "parameters": [ "source" ], "start_line": 541, "end_line": 550, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "get_swig_modulename", "long_name": "get_swig_modulename( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "source" ], "start_line": 552, "end_line": 561, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "_find_swig_target", "long_name": "_find_swig_target( target_dir , name )", "filename": "build_src.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "target_dir", "name" ], "start_line": 563, "end_line": 568, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "get_f2py_modulename", "long_name": "get_f2py_modulename( source )", "filename": "build_src.py", "nloc": 13, "complexity": 4, "token_count": 65, "parameters": [ "source" ], "start_line": 577, "end_line": 589, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 0 } ], 
"methods_before": [ { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_src.py", "nloc": 14, "complexity": 1, "token_count": 66, "parameters": [ "self" ], "start_line": 66, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_src.py", "nloc": 28, "complexity": 8, "token_count": 201, "parameters": [ "self" ], "start_line": 81, "end_line": 112, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_src.py", "nloc": 7, "complexity": 4, "token_count": 37, "parameters": [ "self" ], "start_line": 114, "end_line": 120, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "backend_split.func", "long_name": "backend_split.func( extension , src_dir )", "filename": "build_src.py", "nloc": 9, "complexity": 2, "token_count": 74, "parameters": [ "extension", "src_dir" ], "start_line": 129, "end_line": 137, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 3 }, { "name": "backend_split.func_init", "long_name": "backend_split.func_init( extension , src_dir )", "filename": "build_src.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "extension", "src_dir" ], "start_line": 138, "end_line": 143, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 3 }, { "name": "backend_split", "long_name": "backend_split( self )", "filename": "build_src.py", "nloc": 21, "complexity": 4, "token_count": 210, "parameters": [ "self" ], "start_line": 122, "end_line": 157, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "split_extension", "long_name": "split_extension( self , ext , backend )", "filename": "build_src.py", "nloc": 26, "complexity": 1, 
"token_count": 184, "parameters": [ "self", "ext", "backend" ], "start_line": 159, "end_line": 184, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "build_sources", "long_name": "build_sources( self )", "filename": "build_src.py", "nloc": 7, "complexity": 3, "token_count": 41, "parameters": [ "self" ], "start_line": 186, "end_line": 195, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "build_library_sources", "long_name": "build_library_sources( self , lib_name , build_info )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 59, "parameters": [ "self", "lib_name", "build_info" ], "start_line": 197, "end_line": 208, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "build_extension_sources", "long_name": "build_extension_sources( self , ext )", "filename": "build_src.py", "nloc": 23, "complexity": 4, "token_count": 207, "parameters": [ "self", "ext" ], "start_line": 210, "end_line": 243, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 34, "top_nesting_level": 1 }, { "name": "generate_sources", "long_name": "generate_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 29, "complexity": 9, "token_count": 193, "parameters": [ "self", "sources", "extension" ], "start_line": 245, "end_line": 273, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "filter_py_files", "long_name": "filter_py_files( self , sources )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "self", "sources" ], "start_line": 275, "end_line": 284, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "template_sources", "long_name": "template_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 22, "complexity": 6, "token_count": 170, 
"parameters": [ "self", "sources", "extension" ], "start_line": 286, "end_line": 307, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "f2py_sources", "long_name": "f2py_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 110, "complexity": 24, "token_count": 874, "parameters": [ "self", "sources", "extension" ], "start_line": 309, "end_line": 428, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 120, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 80, "complexity": 17, "token_count": 539, "parameters": [ "self", "sources", "extension" ], "start_line": 430, "end_line": 517, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 88, "top_nesting_level": 1 }, { "name": "appendpath", "long_name": "appendpath( prefix , path )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 101, "parameters": [ "prefix", "path" ], "start_line": 519, "end_line": 526, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "get_swig_target", "long_name": "get_swig_target( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 48, "parameters": [ "source" ], "start_line": 534, "end_line": 543, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "get_swig_modulename", "long_name": "get_swig_modulename( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "source" ], "start_line": 545, "end_line": 554, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "_find_swig_target", "long_name": "_find_swig_target( target_dir , name )", "filename": "build_src.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "target_dir", "name" ], "start_line": 556, "end_line": 
561, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "get_f2py_modulename", "long_name": "get_f2py_modulename( source )", "filename": "build_src.py", "nloc": 13, "complexity": 4, "token_count": 65, "parameters": [ "source" ], "start_line": 570, "end_line": 582, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "backend_split", "long_name": "backend_split( self )", "filename": "build_src.py", "nloc": 21, "complexity": 4, "token_count": 210, "parameters": [ "self" ], "start_line": 126, "end_line": 164, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 39, "top_nesting_level": 1 }, { "name": "backend_split.func", "long_name": "backend_split.func( extension , src_dir , name = name , fullname = fullname , backends = backends )", "filename": "build_src.py", "nloc": 12, "complexity": 2, "token_count": 86, "parameters": [ "extension", "src_dir", "name", "fullname", "backends" ], "start_line": 133, "end_line": 144, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 3 }, { "name": "backend_split.func", "long_name": "backend_split.func( extension , src_dir )", "filename": "build_src.py", "nloc": 9, "complexity": 2, "token_count": 74, "parameters": [ "extension", "src_dir" ], "start_line": 129, "end_line": 137, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 3 } ], "nloc": 514, "complexity": 105, "token_count": 3509, "diff_parsed": { "added": [ "exec \"import _\" + _which[0] + \".%(name)s as ___m\"", "for ___a in dir(___m):", " exec ___a + \" = getattr(___m,___a)\"", "else:", " del ___a, ___m", " def func(extension, src_dir,", " name=name,", " fullname=fullname,", " backends=backends):" ], "deleted": [ "exec \"from _\" + _which[0] + \".%(name)s import *\"", " def func(extension, src_dir):" ] } } ] }, { "hash": "f8929d2f79dc9b1e3abef1e1b15b0b5577bbcd3e", "msg": "Clean up", "author": 
{ "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-22T09:10:22+00:00", "author_timezone": 0, "committer_date": "2005-03-22T09:10:22+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "64fb0eeb6af126e9757435c90c5d9165dc783881" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 5, "insertions": 2, "lines": 7, "files": 1, "dmm_unit_size": 1.0, "dmm_unit_complexity": 1.0, "dmm_unit_interfacing": 0.0, "modified_files": [ { "old_path": "scipy_base/setup_scipy_base.py", "new_path": "scipy_base/setup_scipy_base.py", "filename": "setup_scipy_base.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -28,12 +28,8 @@ def configuration(parent_package='',parent_path=None):\n 'fastumath_nounsigned.inc',\n '_scipy_mapping.c',\n '_scipy_number.c']\n- depends = umath_c_sources # ????\n- depends = [os.path.join(local_path,x) for x in depends]\n umath_c_sources = [os.path.join(local_path,x) for x in umath_c_sources]\n- umath_c = SourceGenerator(func = None,\n- target = os.path.join(local_path,'fastumathmodule.c'),\n- sources = umath_c_sources)\n+ umath_c = os.path.join(local_path,'fastumathmodule.c')\n sources = [umath_c, os.path.join(local_path,'isnan.c')]\n define_macros = []\n undef_macros = []\n@@ -92,6 +88,7 @@ def compiled_base_c(ext,src_dir):\n dict_append(ext_args,**numarray_info)\n config['ext_modules'].append(Extension(**ext_args))\n \n+\n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n x11 = get_info('x11')\n", "added_lines": 2, "deleted_lines": 5, "source_code": "#!/usr/bin/env python\nimport os, sys\nfrom glob import glob\n \ndef configuration(parent_package='',parent_path=None):\n from scipy_distutils.system_info import get_info, dict_append\n from scipy_distutils.core import Extension\n from 
scipy_distutils.misc_util import get_path,default_config_dict,dot_join\n from scipy_distutils.misc_util import get_path,default_config_dict,\\\n dot_join,SourceGenerator\n from distutils.dep_util import newer\n from distutils.file_util import copy_file\n\n package = 'scipy_base'\n local_path = get_path(__name__,parent_path)\n config = default_config_dict(package,parent_package)\n\n numpy_info = get_info('numpy',notfound_action=2)\n\n # extra_compile_args -- trying to find something that is binary compatible\n # with msvc for returning Py_complex from functions\n extra_compile_args=[]\n \n # fastumath module\n # scipy_base.fastumath module\n umath_c_sources = ['fastumathmodule.c',\n 'fastumath_unsigned.inc',\n 'fastumath_nounsigned.inc',\n '_scipy_mapping.c',\n '_scipy_number.c']\n umath_c_sources = [os.path.join(local_path,x) for x in umath_c_sources]\n umath_c = os.path.join(local_path,'fastumathmodule.c')\n sources = [umath_c, os.path.join(local_path,'isnan.c')]\n define_macros = []\n undef_macros = []\n libraries = []\n if sys.byteorder == \"little\":\n define_macros.append(('USE_MCONF_LITE_LE',None))\n else:\n define_macros.append(('USE_MCONF_LITE_BE',None))\n if sys.platform in ['win32']:\n undef_macros.append('HAVE_INVERSE_HYPERBOLIC')\n else:\n libraries.append('m')\n define_macros.append(('HAVE_INVERSE_HYPERBOLIC',None))\n\n ext_args = {'name':dot_join(package,'fastumath'),\n 'sources':sources,\n 'define_macros': define_macros,\n 'undef_macros': undef_macros,\n 'libraries': libraries,\n 'extra_compile_args': extra_compile_args,\n 'depends': umath_c_sources}\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n\n # _nc_compiled_base and _na_compiled_base modules\n\n _compiled_base_c = os.path.join(local_path,'_compiled_base.c')\n def compiled_base_c(ext,src_dir):\n source = os.path.join(src_dir,ext.name.split('.')[-1] + '.c')\n if newer(_compiled_base_c,source):\n copy_file(_compiled_base_c,source)\n return 
[source]\n\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_nc_compiled_base'),\n sources = [compiled_base_c],\n depends = [_compiled_base_c],\n define_macros = [('NUMERIC',None)],\n include_dirs = [local_path]\n )\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n numarray_info = get_info('numarray')\n if numarray_info:\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_na_compiled_base'),\n sources = [compiled_base_c],\n depends = [_compiled_base_c],\n define_macros = [('NUMARRAY',None)],\n include_dirs = [local_path]\n )\n dict_append(ext_args,**numarray_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n\n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n x11 = get_info('x11')\n if x11:\n x11['define_macros'] = [('HAVE_X11',None)]\n ext = Extension(dot_join(package,'display_test'), sources, **x11)\n config['ext_modules'].append(ext)\n\n return config\n\nif __name__ == '__main__':\n from scipy_base_version import scipy_base_version\n print 'scipy_base Version',scipy_base_version\n from scipy_distutils.core import setup\n\n setup(version = scipy_base_version,\n maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy base module\",\n url = \"http://www.scipy.org\",\n license = \"SciPy License (BSD Style)\",\n **configuration(parent_path='')\n )\n", "source_code_before": "#!/usr/bin/env python\nimport os, sys\nfrom glob import glob\n \ndef configuration(parent_package='',parent_path=None):\n from scipy_distutils.system_info import get_info, dict_append\n from scipy_distutils.core import Extension\n from scipy_distutils.misc_util import get_path,default_config_dict,dot_join\n from scipy_distutils.misc_util import get_path,default_config_dict,\\\n dot_join,SourceGenerator\n from distutils.dep_util import newer\n from distutils.file_util import copy_file\n\n package = 'scipy_base'\n local_path = 
get_path(__name__,parent_path)\n config = default_config_dict(package,parent_package)\n\n numpy_info = get_info('numpy',notfound_action=2)\n\n # extra_compile_args -- trying to find something that is binary compatible\n # with msvc for returning Py_complex from functions\n extra_compile_args=[]\n \n # fastumath module\n # scipy_base.fastumath module\n umath_c_sources = ['fastumathmodule.c',\n 'fastumath_unsigned.inc',\n 'fastumath_nounsigned.inc',\n '_scipy_mapping.c',\n '_scipy_number.c']\n depends = umath_c_sources # ????\n depends = [os.path.join(local_path,x) for x in depends]\n umath_c_sources = [os.path.join(local_path,x) for x in umath_c_sources]\n umath_c = SourceGenerator(func = None,\n target = os.path.join(local_path,'fastumathmodule.c'),\n sources = umath_c_sources)\n sources = [umath_c, os.path.join(local_path,'isnan.c')]\n define_macros = []\n undef_macros = []\n libraries = []\n if sys.byteorder == \"little\":\n define_macros.append(('USE_MCONF_LITE_LE',None))\n else:\n define_macros.append(('USE_MCONF_LITE_BE',None))\n if sys.platform in ['win32']:\n undef_macros.append('HAVE_INVERSE_HYPERBOLIC')\n else:\n libraries.append('m')\n define_macros.append(('HAVE_INVERSE_HYPERBOLIC',None))\n\n ext_args = {'name':dot_join(package,'fastumath'),\n 'sources':sources,\n 'define_macros': define_macros,\n 'undef_macros': undef_macros,\n 'libraries': libraries,\n 'extra_compile_args': extra_compile_args,\n 'depends': umath_c_sources}\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n\n # _nc_compiled_base and _na_compiled_base modules\n\n _compiled_base_c = os.path.join(local_path,'_compiled_base.c')\n def compiled_base_c(ext,src_dir):\n source = os.path.join(src_dir,ext.name.split('.')[-1] + '.c')\n if newer(_compiled_base_c,source):\n copy_file(_compiled_base_c,source)\n return [source]\n\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_nc_compiled_base'),\n sources = [compiled_base_c],\n depends = 
[_compiled_base_c],\n define_macros = [('NUMERIC',None)],\n include_dirs = [local_path]\n )\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n numarray_info = get_info('numarray')\n if numarray_info:\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_na_compiled_base'),\n sources = [compiled_base_c],\n depends = [_compiled_base_c],\n define_macros = [('NUMARRAY',None)],\n include_dirs = [local_path]\n )\n dict_append(ext_args,**numarray_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n x11 = get_info('x11')\n if x11:\n x11['define_macros'] = [('HAVE_X11',None)]\n ext = Extension(dot_join(package,'display_test'), sources, **x11)\n config['ext_modules'].append(ext)\n\n return config\n\nif __name__ == '__main__':\n from scipy_base_version import scipy_base_version\n print 'scipy_base Version',scipy_base_version\n from scipy_distutils.core import setup\n\n setup(version = scipy_base_version,\n maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy base module\",\n url = \"http://www.scipy.org\",\n license = \"SciPy License (BSD Style)\",\n **configuration(parent_path='')\n )\n", "methods": [ { "name": "configuration.compiled_base_c", "long_name": "configuration.compiled_base_c( ext , src_dir )", "filename": "setup_scipy_base.py", "nloc": 5, "complexity": 2, "token_count": 50, "parameters": [ "ext", "src_dir" ], "start_line": 61, "end_line": 65, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 73, "complexity": 6, "token_count": 499, "parameters": [ "parent_package", "parent_path" ], "start_line": 5, "end_line": 100, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 96, 
"top_nesting_level": 0 } ], "methods_before": [ { "name": "configuration.compiled_base_c", "long_name": "configuration.compiled_base_c( ext , src_dir )", "filename": "setup_scipy_base.py", "nloc": 5, "complexity": 2, "token_count": 50, "parameters": [ "ext", "src_dir" ], "start_line": 65, "end_line": 69, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 77, "complexity": 7, "token_count": 533, "parameters": [ "parent_package", "parent_path" ], "start_line": 5, "end_line": 103, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 99, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 73, "complexity": 6, "token_count": 499, "parameters": [ "parent_package", "parent_path" ], "start_line": 5, "end_line": 100, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 96, "top_nesting_level": 0 } ], "nloc": 91, "complexity": 8, "token_count": 610, "diff_parsed": { "added": [ " umath_c = os.path.join(local_path,'fastumathmodule.c')", "" ], "deleted": [ " depends = umath_c_sources # ????", " depends = [os.path.join(local_path,x) for x in depends]", " umath_c = SourceGenerator(func = None,", " target = os.path.join(local_path,'fastumathmodule.c'),", " sources = umath_c_sources)" ] } } ] }, { "hash": "61b97765779d669db1f014e9c4ef8787dafef27a", "msg": "Added swig_opts introduced in Py2.4", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-24T09:17:48+00:00", "author_timezone": 0, "committer_date": "2005-03-24T09:17:48+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, 
"parents": [ "f8929d2f79dc9b1e3abef1e1b15b0b5577bbcd3e" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 0, "insertions": 7, "lines": 7, "files": 1, "dmm_unit_size": 0.0, "dmm_unit_complexity": 1.0, "dmm_unit_interfacing": 0.0, "modified_files": [ { "old_path": "scipy_distutils/extension.py", "new_path": "scipy_distutils/extension.py", "filename": "extension.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -27,6 +27,7 @@ def __init__ (self, name, sources,\n extra_compile_args=None,\n extra_link_args=None,\n export_symbols=None,\n+ swig_opts=None,\n depends=None,\n language=None,\n f2py_options=None,\n@@ -46,13 +47,19 @@ def __init__ (self, name, sources,\n # Avoid assert statements checking that sources contains strings:\n self.sources = sources\n \n+ # Python 2.4 distutils new features\n+ self.swig_opts = swig_opts or []\n+\n # Python 2.3 distutils new features\n self.depends = depends or []\n self.language = language\n \n+ # scipy_distutils features\n self.f2py_options = f2py_options or []\n self.module_dirs = module_dirs or []\n \n+ return\n+\n def has_cxx_sources(self):\n for source in self.sources:\n if isinstance(source,SourceGenerator) \\\n", "added_lines": 7, "deleted_lines": 0, "source_code": "\"\"\"distutils.extension\n\nProvides the Extension class, used to describe C/C++ extension\nmodules in setup scripts.\n\nOverridden to support f2py and SourceGenerator.\n\"\"\"\n\n__revision__ = \"$Id$\"\n\nfrom distutils.extension import Extension as old_Extension\nfrom scipy_distutils.misc_util import SourceGenerator, SourceFilter\n\nimport re\ncxx_ext_re = re.compile(r'.*[.](cpp|cxx|cc)\\Z',re.I).match\nfortran_pyf_ext_re = re.compile(r'.*[.](f90|f95|f77|for|ftn|f|pyf)\\Z',re.I).match\n\nclass Extension(old_Extension):\n def __init__ (self, name, sources,\n include_dirs=None,\n define_macros=None,\n undef_macros=None,\n library_dirs=None,\n libraries=None,\n runtime_library_dirs=None,\n extra_objects=None,\n 
extra_compile_args=None,\n extra_link_args=None,\n export_symbols=None,\n swig_opts=None,\n depends=None,\n language=None,\n f2py_options=None,\n module_dirs=None,\n ):\n old_Extension.__init__(self,name, [],\n include_dirs,\n define_macros,\n undef_macros,\n library_dirs,\n libraries,\n runtime_library_dirs,\n extra_objects,\n extra_compile_args,\n extra_link_args,\n export_symbols)\n # Avoid assert statements checking that sources contains strings:\n self.sources = sources\n\n # Python 2.4 distutils new features\n self.swig_opts = swig_opts or []\n\n # Python 2.3 distutils new features\n self.depends = depends or []\n self.language = language\n\n # scipy_distutils features\n self.f2py_options = f2py_options or []\n self.module_dirs = module_dirs or []\n\n return\n\n def has_cxx_sources(self):\n for source in self.sources:\n if isinstance(source,SourceGenerator) \\\n or isinstance(source,SourceFilter):\n for s in source.sources:\n if cxx_ext_re(s):\n return 1\n if cxx_ext_re(str(source)):\n return 1\n return 0\n\n def has_f2py_sources(self):\n for source in self.sources:\n if isinstance(source,SourceGenerator) \\\n or isinstance(source,SourceFilter):\n for s in source.sources:\n if fortran_pyf_ext_re(s):\n return 1\n elif fortran_pyf_ext_re(source):\n return 1\n return 0\n\n def generate_sources(self):\n new_sources = []\n for source in self.sources:\n if isinstance(source, SourceGenerator):\n new_sources.append(source.generate())\n elif isinstance(source, SourceFilter):\n new_sources.extend(source.filter())\n else:\n new_sources.append(source)\n self.sources = new_sources\n \n def get_sources(self):\n sources = []\n for source in self.sources:\n if isinstance(source,SourceGenerator):\n sources.extend(source.sources)\n elif isinstance(source,SourceFilter):\n sources.extend(source.sources)\n else:\n sources.append(source)\n return sources\n\n# class Extension\n", "source_code_before": "\"\"\"distutils.extension\n\nProvides the Extension class, used to describe 
C/C++ extension\nmodules in setup scripts.\n\nOverridden to support f2py and SourceGenerator.\n\"\"\"\n\n__revision__ = \"$Id$\"\n\nfrom distutils.extension import Extension as old_Extension\nfrom scipy_distutils.misc_util import SourceGenerator, SourceFilter\n\nimport re\ncxx_ext_re = re.compile(r'.*[.](cpp|cxx|cc)\\Z',re.I).match\nfortran_pyf_ext_re = re.compile(r'.*[.](f90|f95|f77|for|ftn|f|pyf)\\Z',re.I).match\n\nclass Extension(old_Extension):\n def __init__ (self, name, sources,\n include_dirs=None,\n define_macros=None,\n undef_macros=None,\n library_dirs=None,\n libraries=None,\n runtime_library_dirs=None,\n extra_objects=None,\n extra_compile_args=None,\n extra_link_args=None,\n export_symbols=None,\n depends=None,\n language=None,\n f2py_options=None,\n module_dirs=None,\n ):\n old_Extension.__init__(self,name, [],\n include_dirs,\n define_macros,\n undef_macros,\n library_dirs,\n libraries,\n runtime_library_dirs,\n extra_objects,\n extra_compile_args,\n extra_link_args,\n export_symbols)\n # Avoid assert statements checking that sources contains strings:\n self.sources = sources\n\n # Python 2.3 distutils new features\n self.depends = depends or []\n self.language = language\n\n self.f2py_options = f2py_options or []\n self.module_dirs = module_dirs or []\n\n def has_cxx_sources(self):\n for source in self.sources:\n if isinstance(source,SourceGenerator) \\\n or isinstance(source,SourceFilter):\n for s in source.sources:\n if cxx_ext_re(s):\n return 1\n if cxx_ext_re(str(source)):\n return 1\n return 0\n\n def has_f2py_sources(self):\n for source in self.sources:\n if isinstance(source,SourceGenerator) \\\n or isinstance(source,SourceFilter):\n for s in source.sources:\n if fortran_pyf_ext_re(s):\n return 1\n elif fortran_pyf_ext_re(source):\n return 1\n return 0\n\n def generate_sources(self):\n new_sources = []\n for source in self.sources:\n if isinstance(source, SourceGenerator):\n new_sources.append(source.generate())\n elif isinstance(source, 
SourceFilter):\n new_sources.extend(source.filter())\n else:\n new_sources.append(source)\n self.sources = new_sources\n \n def get_sources(self):\n sources = []\n for source in self.sources:\n if isinstance(source,SourceGenerator):\n sources.extend(source.sources)\n elif isinstance(source,SourceFilter):\n sources.extend(source.sources)\n else:\n sources.append(source)\n return sources\n\n# class Extension\n", "methods": [ { "name": "__init__", "long_name": "__init__( self , name , sources , include_dirs = None , define_macros = None , undef_macros = None , library_dirs = None , libraries = None , runtime_library_dirs = None , extra_objects = None , extra_compile_args = None , extra_link_args = None , export_symbols = None , swig_opts = None , depends = None , language = None , f2py_options = None , module_dirs = None , )", "filename": "extension.py", "nloc": 35, "complexity": 5, "token_count": 144, "parameters": [ "self", "name", "sources", "include_dirs", "define_macros", "undef_macros", "library_dirs", "libraries", "runtime_library_dirs", "extra_objects", "extra_compile_args", "extra_link_args", "export_symbols", "swig_opts", "depends", "language", "f2py_options", "module_dirs" ], "start_line": 19, "end_line": 61, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 43, "top_nesting_level": 1 }, { "name": "has_cxx_sources", "long_name": "has_cxx_sources( self )", "filename": "extension.py", "nloc": 10, "complexity": 7, "token_count": 56, "parameters": [ "self" ], "start_line": 63, "end_line": 72, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "has_f2py_sources", "long_name": "has_f2py_sources( self )", "filename": "extension.py", "nloc": 10, "complexity": 7, "token_count": 53, "parameters": [ "self" ], "start_line": 74, "end_line": 83, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "generate_sources", "long_name": "generate_sources( self )", "filename": 
"extension.py", "nloc": 10, "complexity": 4, "token_count": 65, "parameters": [ "self" ], "start_line": 85, "end_line": 94, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_sources", "long_name": "get_sources( self )", "filename": "extension.py", "nloc": 10, "complexity": 4, "token_count": 58, "parameters": [ "self" ], "start_line": 96, "end_line": 105, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 } ], "methods_before": [ { "name": "__init__", "long_name": "__init__( self , name , sources , include_dirs = None , define_macros = None , undef_macros = None , library_dirs = None , libraries = None , runtime_library_dirs = None , extra_objects = None , extra_compile_args = None , extra_link_args = None , export_symbols = None , depends = None , language = None , f2py_options = None , module_dirs = None , )", "filename": "extension.py", "nloc": 32, "complexity": 4, "token_count": 131, "parameters": [ "self", "name", "sources", "include_dirs", "define_macros", "undef_macros", "library_dirs", "libraries", "runtime_library_dirs", "extra_objects", "extra_compile_args", "extra_link_args", "export_symbols", "depends", "language", "f2py_options", "module_dirs" ], "start_line": 19, "end_line": 54, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "has_cxx_sources", "long_name": "has_cxx_sources( self )", "filename": "extension.py", "nloc": 10, "complexity": 7, "token_count": 56, "parameters": [ "self" ], "start_line": 56, "end_line": 65, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "has_f2py_sources", "long_name": "has_f2py_sources( self )", "filename": "extension.py", "nloc": 10, "complexity": 7, "token_count": 53, "parameters": [ "self" ], "start_line": 67, "end_line": 76, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": 
"generate_sources", "long_name": "generate_sources( self )", "filename": "extension.py", "nloc": 10, "complexity": 4, "token_count": 65, "parameters": [ "self" ], "start_line": 78, "end_line": 87, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_sources", "long_name": "get_sources( self )", "filename": "extension.py", "nloc": 10, "complexity": 4, "token_count": 58, "parameters": [ "self" ], "start_line": 89, "end_line": 98, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 } ], "changed_methods": [ { "name": "__init__", "long_name": "__init__( self , name , sources , include_dirs = None , define_macros = None , undef_macros = None , library_dirs = None , libraries = None , runtime_library_dirs = None , extra_objects = None , extra_compile_args = None , extra_link_args = None , export_symbols = None , swig_opts = None , depends = None , language = None , f2py_options = None , module_dirs = None , )", "filename": "extension.py", "nloc": 35, "complexity": 5, "token_count": 144, "parameters": [ "self", "name", "sources", "include_dirs", "define_macros", "undef_macros", "library_dirs", "libraries", "runtime_library_dirs", "extra_objects", "extra_compile_args", "extra_link_args", "export_symbols", "swig_opts", "depends", "language", "f2py_options", "module_dirs" ], "start_line": 19, "end_line": 61, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 43, "top_nesting_level": 1 } ], "nloc": 89, "complexity": 27, "token_count": 439, "diff_parsed": { "added": [ " swig_opts=None,", " # Python 2.4 distutils new features", " self.swig_opts = swig_opts or []", "", " # scipy_distutils features", " return", "" ], "deleted": [] } } ] }, { "hash": "f8ce14cdaa75a24670a531722a9cfdd425eaea55", "msg": "Improved build_ext --backends support, setup.py files should not use numpy_info anymore, build_src takes care of that.", "author": { "name": "Pearu Peterson", "email": 
"pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-24T09:22:43+00:00", "author_timezone": 0, "committer_date": "2005-03-24T09:22:43+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "61b97765779d669db1f014e9c4ef8787dafef27a" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 22, "insertions": 41, "lines": 63, "files": 3, "dmm_unit_size": 0.23529411764705882, "dmm_unit_complexity": 0.9411764705882353, "dmm_unit_interfacing": 1.0, "modified_files": [ { "old_path": "scipy_distutils/command/build_ext.py", "new_path": "scipy_distutils/command/build_ext.py", "filename": "build_ext.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -16,7 +16,6 @@\n has_cxx_sources\n from distutils.errors import DistutilsFileError\n \n-\n class build_ext (old_build_ext):\n \n description = \"build C/C++/F extensions (compile/link to build directory)\"\n", "added_lines": 0, "deleted_lines": 1, "source_code": "\"\"\" Modified version of build_ext that handles fortran source files.\n\"\"\"\n\nimport os\nimport string\nimport sys\nfrom glob import glob\nfrom types import *\n\nfrom distutils.dep_util import newer_group, newer\nfrom distutils.command.build_ext import build_ext as old_build_ext\n\nfrom scipy_distutils.command.build_clib import get_headers,get_directories\nfrom scipy_distutils import misc_util, log\nfrom scipy_distutils.misc_util import filter_sources, has_f_sources, \\\n has_cxx_sources\nfrom distutils.errors import DistutilsFileError\n\nclass build_ext (old_build_ext):\n\n description = \"build C/C++/F extensions (compile/link to build directory)\"\n\n user_options = old_build_ext.user_options + [\n ('fcompiler=', None,\n \"specify the Fortran compiler type\"),\n ('backends=', None,\n \"specify the array backends (numeric,numarray,..) 
as a comma separated list\"),\n ]\n\n def initialize_options(self):\n old_build_ext.initialize_options(self)\n self.fcompiler = None\n self.backends = None\n return\n\n def finalize_options(self):\n old_build_ext.finalize_options(self)\n self.set_undefined_options('config_fc',\n ('fcompiler', 'fcompiler'))\n if self.backends is None:\n self.backends = None\n return\n\n def run(self):\n if not self.extensions:\n return\n\n # Make sure that extension sources are complete.\n for ext in self.extensions:\n if not misc_util.all_strings(ext.sources):\n raise TypeError,'Extension \"%s\" sources contains unresolved'\\\n ' items (call build_src before build_ext).' % (ext.name)\n\n if self.distribution.has_c_libraries():\n build_clib = self.get_finalized_command('build_clib')\n self.library_dirs.append(build_clib.build_clib)\n else:\n build_clib = None\n\n # Not including C libraries to the list of\n # extension libraries automatically to prevent\n # bogus linking commands. Extensions must\n # explicitly specify the C libraries that they use.\n\n # Determine if Fortran compiler is needed.\n if build_clib and build_clib.fcompiler is not None:\n need_f_compiler = 1\n else:\n need_f_compiler = 0\n for ext in self.extensions:\n if has_f_sources(ext.sources):\n need_f_compiler = 1\n break\n if getattr(ext,'language','c') in ['f77','f90']:\n need_f_compiler = 1\n break\n\n # Determine if C++ compiler is needed.\n need_cxx_compiler = 0\n for ext in self.extensions:\n if has_cxx_sources(ext.sources):\n need_cxx_compiler = 1\n break\n if getattr(ext,'language','c')=='c++':\n need_cxx_compiler = 1\n break\n\n from distutils.ccompiler import new_compiler\n self.compiler = new_compiler(compiler=self.compiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.compiler.customize(self.distribution,need_cxx=need_cxx_compiler)\n self.compiler.customize_cmd(self)\n self.compiler.show_customization()\n \n # Initialize Fortran/C++ compilers if needed.\n if 
need_f_compiler:\n from scipy_distutils.fcompiler import new_fcompiler\n self.fcompiler = new_fcompiler(compiler=self.fcompiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.fcompiler.customize(self.distribution)\n self.fcompiler.customize_cmd(self)\n self.fcompiler.show_customization()\n\n # Build extensions\n self.build_extensions()\n return\n\n def swig_sources(self, sources):\n # Do nothing. Swig sources have beed handled in build_src command.\n return sources\n\n def build_extension(self, ext):\n sources = ext.sources\n if sources is None or type(sources) not in (ListType, TupleType):\n raise DistutilsSetupError, \\\n (\"in 'ext_modules' option (extension '%s'), \" +\n \"'sources' must be present and must be \" +\n \"a list of source filenames\") % ext.name\n sources = list(sources)\n\n if not sources:\n return\n\n fullname = self.get_ext_fullname(ext.name)\n if self.inplace:\n modpath = string.split(fullname, '.')\n package = string.join(modpath[0:-1], '.')\n base = modpath[-1]\n\n build_py = self.get_finalized_command('build_py')\n package_dir = build_py.get_package_dir(package)\n ext_filename = os.path.join(package_dir,\n self.get_ext_filename(base))\n else:\n ext_filename = os.path.join(self.build_lib,\n self.get_ext_filename(fullname))\n depends = sources + ext.depends\n\n if not (self.force or newer_group(depends, ext_filename, 'newer')):\n log.debug(\"skipping '%s' extension (up-to-date)\", ext.name)\n return\n else:\n log.info(\"building '%s' extension\", ext.name)\n\n extra_args = ext.extra_compile_args or []\n macros = ext.define_macros[:]\n for undef in ext.undef_macros:\n macros.append((undef,))\n\n c_sources, cxx_sources, f_sources, fmodule_sources = \\\n filter_sources(ext.sources)\n if self.compiler.compiler_type=='msvc':\n if cxx_sources:\n # Needed to compile kiva.agg._agg extension.\n extra_args.append('/Zm1000')\n # this hack works around the msvc compiler attributes\n # problem, msvc uses its own convention 
:(\n c_sources += cxx_sources\n cxx_sources = []\n\n if sys.version[:3]>='2.3':\n kws = {'depends':ext.depends}\n else:\n kws = {}\n\n backend = getattr(ext,'backend',None)\n if backend is not None:\n output_dir = os.path.join(self.build_temp,'_'+backend)\n else:\n output_dir = self.build_temp\n \n c_objects = []\n if c_sources:\n log.info(\"compiling C sources\")\n c_objects = self.compiler.compile(c_sources,\n output_dir=output_dir,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n if cxx_sources:\n log.info(\"compiling C++ sources\")\n\n old_compiler = self.compiler.compiler_so[0]\n self.compiler.compiler_so[0] = self.compiler.compiler_cxx[0]\n\n c_objects += self.compiler.compile(cxx_sources,\n output_dir=output_dir,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n self.compiler.compiler_so[0] = old_compiler\n\n check_for_f90_modules = not not fmodule_sources\n\n if f_sources or fmodule_sources:\n extra_postargs = []\n include_dirs = ext.include_dirs[:]\n module_dirs = ext.module_dirs[:]\n\n #if self.fcompiler.compiler_type=='ibm':\n macros = []\n\n if check_for_f90_modules:\n module_build_dir = os.path.join(\\\n self.build_temp,os.path.dirname(\\\n self.get_ext_filename(fullname)))\n\n self.mkpath(module_build_dir)\n if self.fcompiler.module_dir_switch is None:\n existing_modules = glob('*.mod')\n extra_postargs += self.fcompiler.module_options(\\\n module_dirs,module_build_dir)\n\n f_objects = []\n if fmodule_sources:\n log.info(\"compiling Fortran 90 module sources\")\n f_objects = self.fcompiler.compile(fmodule_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n\n if check_for_f90_modules \\\n and self.fcompiler.module_dir_switch is None:\n for f in glob('*.mod'):\n if f in existing_modules:\n continue\n try:\n self.move_file(f, 
module_build_dir)\n except DistutilsFileError: # already exists in destination\n os.remove(f)\n \n if f_sources:\n log.info(\"compiling Fortran sources\")\n f_objects += self.fcompiler.compile(f_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n else:\n f_objects = []\n\n objects = c_objects + f_objects\n\n if ext.extra_objects:\n objects.extend(ext.extra_objects)\n extra_args = ext.extra_link_args or []\n\n try:\n old_linker_so_0 = self.compiler.linker_so[0]\n except:\n pass\n \n use_fortran_linker = getattr(ext,'language','c') in ['f77','f90']\n c_libraries = []\n c_library_dirs = []\n if use_fortran_linker or f_sources:\n use_fortran_linker = 1\n elif self.distribution.has_c_libraries(): \n build_clib = self.get_finalized_command('build_clib')\n f_libs = []\n for (lib_name, build_info) in build_clib.libraries:\n if has_f_sources(build_info.get('sources',[])):\n f_libs.append(lib_name)\n if lib_name in ext.libraries:\n # XXX: how to determine if c_libraries contain\n # fortran compiled sources?\n c_libraries.extend(build_info.get('libraries',[]))\n c_library_dirs.extend(build_info.get('library_dirs',[]))\n for l in ext.libraries:\n if l in f_libs:\n use_fortran_linker = 1\n break\n\n # Always use system linker when using MSVC compiler.\n if self.compiler.compiler_type=='msvc' and use_fortran_linker:\n c_libraries.extend(self.fcompiler.libraries)\n c_library_dirs.extend(self.fcompiler.library_dirs)\n use_fortran_linker = 0\n\n if use_fortran_linker:\n if cxx_sources:\n # XXX: Which linker should be used, Fortran or C++?\n log.warn('mixing Fortran and C++ is untested')\n link = self.fcompiler.link_shared_object\n language = ext.language or self.fcompiler.detect_language(f_sources)\n else:\n link = self.compiler.link_shared_object\n if sys.version[:3]>='2.3':\n language = ext.language or self.compiler.detect_language(sources)\n else:\n language = 
ext.language\n if cxx_sources:\n self.compiler.linker_so[0] = self.compiler.compiler_cxx[0]\n\n if sys.version[:3]>='2.3':\n kws = {'target_lang':language}\n else:\n kws = {}\n\n link(objects, ext_filename,\n libraries=self.get_libraries(ext) + c_libraries,\n library_dirs=ext.library_dirs + c_library_dirs,\n runtime_library_dirs=ext.runtime_library_dirs,\n extra_postargs=extra_args,\n export_symbols=self.get_export_symbols(ext),\n debug=self.debug,\n build_temp=self.build_temp,**kws)\n\n try:\n self.compiler.linker_so[0] = old_linker_so_0\n except:\n pass\n\n return\n\n def get_source_files (self):\n self.check_extensions_list(self.extensions)\n filenames = []\n def visit_func(filenames,dirname,names):\n if os.path.basename(dirname) in ['CVS','.svn']:\n names[:] = []\n return\n for name in names:\n if name[-1] in \"~#\":\n continue\n fullname = os.path.join(dirname,name)\n if os.path.isfile(fullname):\n filenames.append(fullname)\n # Get sources and any include files in the same directory.\n for ext in self.extensions:\n sources = filter(lambda s:type(s) is StringType,ext.sources)\n filenames.extend(sources)\n filenames.extend(get_headers(get_directories(sources)))\n for d in ext.depends:\n if is_local_src_dir(d):\n os.path.walk(d,visit_func,filenames)\n elif os.path.isfile(d):\n filenames.append(d)\n return filenames\n\n def get_outputs (self):\n self.check_extensions_list(self.extensions)\n\n outputs = []\n for ext in self.extensions:\n if not ext.sources:\n continue\n fullname = self.get_ext_fullname(ext.name)\n outputs.append(os.path.join(self.build_lib,\n self.get_ext_filename(fullname)))\n return outputs\n\ndef is_local_src_dir(directory):\n \"\"\" Return true if directory is local directory.\n \"\"\"\n abs_dir = os.path.abspath(directory)\n c = os.path.commonprefix([os.getcwd(),abs_dir])\n new_dir = abs_dir[len(c):].split(os.sep)\n if new_dir and not new_dir[0]:\n new_dir = new_dir[1:]\n if new_dir and new_dir[0]=='build':\n return 0\n new_dir = 
os.sep.join(new_dir)\n return os.path.isdir(new_dir)\n", "source_code_before": "\"\"\" Modified version of build_ext that handles fortran source files.\n\"\"\"\n\nimport os\nimport string\nimport sys\nfrom glob import glob\nfrom types import *\n\nfrom distutils.dep_util import newer_group, newer\nfrom distutils.command.build_ext import build_ext as old_build_ext\n\nfrom scipy_distutils.command.build_clib import get_headers,get_directories\nfrom scipy_distutils import misc_util, log\nfrom scipy_distutils.misc_util import filter_sources, has_f_sources, \\\n has_cxx_sources\nfrom distutils.errors import DistutilsFileError\n\n\nclass build_ext (old_build_ext):\n\n description = \"build C/C++/F extensions (compile/link to build directory)\"\n\n user_options = old_build_ext.user_options + [\n ('fcompiler=', None,\n \"specify the Fortran compiler type\"),\n ('backends=', None,\n \"specify the array backends (numeric,numarray,..) as a comma separated list\"),\n ]\n\n def initialize_options(self):\n old_build_ext.initialize_options(self)\n self.fcompiler = None\n self.backends = None\n return\n\n def finalize_options(self):\n old_build_ext.finalize_options(self)\n self.set_undefined_options('config_fc',\n ('fcompiler', 'fcompiler'))\n if self.backends is None:\n self.backends = None\n return\n\n def run(self):\n if not self.extensions:\n return\n\n # Make sure that extension sources are complete.\n for ext in self.extensions:\n if not misc_util.all_strings(ext.sources):\n raise TypeError,'Extension \"%s\" sources contains unresolved'\\\n ' items (call build_src before build_ext).' % (ext.name)\n\n if self.distribution.has_c_libraries():\n build_clib = self.get_finalized_command('build_clib')\n self.library_dirs.append(build_clib.build_clib)\n else:\n build_clib = None\n\n # Not including C libraries to the list of\n # extension libraries automatically to prevent\n # bogus linking commands. 
Extensions must\n # explicitly specify the C libraries that they use.\n\n # Determine if Fortran compiler is needed.\n if build_clib and build_clib.fcompiler is not None:\n need_f_compiler = 1\n else:\n need_f_compiler = 0\n for ext in self.extensions:\n if has_f_sources(ext.sources):\n need_f_compiler = 1\n break\n if getattr(ext,'language','c') in ['f77','f90']:\n need_f_compiler = 1\n break\n\n # Determine if C++ compiler is needed.\n need_cxx_compiler = 0\n for ext in self.extensions:\n if has_cxx_sources(ext.sources):\n need_cxx_compiler = 1\n break\n if getattr(ext,'language','c')=='c++':\n need_cxx_compiler = 1\n break\n\n from distutils.ccompiler import new_compiler\n self.compiler = new_compiler(compiler=self.compiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.compiler.customize(self.distribution,need_cxx=need_cxx_compiler)\n self.compiler.customize_cmd(self)\n self.compiler.show_customization()\n \n # Initialize Fortran/C++ compilers if needed.\n if need_f_compiler:\n from scipy_distutils.fcompiler import new_fcompiler\n self.fcompiler = new_fcompiler(compiler=self.fcompiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.fcompiler.customize(self.distribution)\n self.fcompiler.customize_cmd(self)\n self.fcompiler.show_customization()\n\n # Build extensions\n self.build_extensions()\n return\n\n def swig_sources(self, sources):\n # Do nothing. 
Swig sources have beed handled in build_src command.\n return sources\n\n def build_extension(self, ext):\n sources = ext.sources\n if sources is None or type(sources) not in (ListType, TupleType):\n raise DistutilsSetupError, \\\n (\"in 'ext_modules' option (extension '%s'), \" +\n \"'sources' must be present and must be \" +\n \"a list of source filenames\") % ext.name\n sources = list(sources)\n\n if not sources:\n return\n\n fullname = self.get_ext_fullname(ext.name)\n if self.inplace:\n modpath = string.split(fullname, '.')\n package = string.join(modpath[0:-1], '.')\n base = modpath[-1]\n\n build_py = self.get_finalized_command('build_py')\n package_dir = build_py.get_package_dir(package)\n ext_filename = os.path.join(package_dir,\n self.get_ext_filename(base))\n else:\n ext_filename = os.path.join(self.build_lib,\n self.get_ext_filename(fullname))\n depends = sources + ext.depends\n\n if not (self.force or newer_group(depends, ext_filename, 'newer')):\n log.debug(\"skipping '%s' extension (up-to-date)\", ext.name)\n return\n else:\n log.info(\"building '%s' extension\", ext.name)\n\n extra_args = ext.extra_compile_args or []\n macros = ext.define_macros[:]\n for undef in ext.undef_macros:\n macros.append((undef,))\n\n c_sources, cxx_sources, f_sources, fmodule_sources = \\\n filter_sources(ext.sources)\n if self.compiler.compiler_type=='msvc':\n if cxx_sources:\n # Needed to compile kiva.agg._agg extension.\n extra_args.append('/Zm1000')\n # this hack works around the msvc compiler attributes\n # problem, msvc uses its own convention :(\n c_sources += cxx_sources\n cxx_sources = []\n\n if sys.version[:3]>='2.3':\n kws = {'depends':ext.depends}\n else:\n kws = {}\n\n backend = getattr(ext,'backend',None)\n if backend is not None:\n output_dir = os.path.join(self.build_temp,'_'+backend)\n else:\n output_dir = self.build_temp\n \n c_objects = []\n if c_sources:\n log.info(\"compiling C sources\")\n c_objects = self.compiler.compile(c_sources,\n 
output_dir=output_dir,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n if cxx_sources:\n log.info(\"compiling C++ sources\")\n\n old_compiler = self.compiler.compiler_so[0]\n self.compiler.compiler_so[0] = self.compiler.compiler_cxx[0]\n\n c_objects += self.compiler.compile(cxx_sources,\n output_dir=output_dir,\n macros=macros,\n include_dirs=ext.include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n self.compiler.compiler_so[0] = old_compiler\n\n check_for_f90_modules = not not fmodule_sources\n\n if f_sources or fmodule_sources:\n extra_postargs = []\n include_dirs = ext.include_dirs[:]\n module_dirs = ext.module_dirs[:]\n\n #if self.fcompiler.compiler_type=='ibm':\n macros = []\n\n if check_for_f90_modules:\n module_build_dir = os.path.join(\\\n self.build_temp,os.path.dirname(\\\n self.get_ext_filename(fullname)))\n\n self.mkpath(module_build_dir)\n if self.fcompiler.module_dir_switch is None:\n existing_modules = glob('*.mod')\n extra_postargs += self.fcompiler.module_options(\\\n module_dirs,module_build_dir)\n\n f_objects = []\n if fmodule_sources:\n log.info(\"compiling Fortran 90 module sources\")\n f_objects = self.fcompiler.compile(fmodule_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n\n if check_for_f90_modules \\\n and self.fcompiler.module_dir_switch is None:\n for f in glob('*.mod'):\n if f in existing_modules:\n continue\n try:\n self.move_file(f, module_build_dir)\n except DistutilsFileError: # already exists in destination\n os.remove(f)\n \n if f_sources:\n log.info(\"compiling Fortran sources\")\n f_objects += self.fcompiler.compile(f_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n else:\n f_objects = []\n\n objects = c_objects + 
f_objects\n\n if ext.extra_objects:\n objects.extend(ext.extra_objects)\n extra_args = ext.extra_link_args or []\n\n try:\n old_linker_so_0 = self.compiler.linker_so[0]\n except:\n pass\n \n use_fortran_linker = getattr(ext,'language','c') in ['f77','f90']\n c_libraries = []\n c_library_dirs = []\n if use_fortran_linker or f_sources:\n use_fortran_linker = 1\n elif self.distribution.has_c_libraries(): \n build_clib = self.get_finalized_command('build_clib')\n f_libs = []\n for (lib_name, build_info) in build_clib.libraries:\n if has_f_sources(build_info.get('sources',[])):\n f_libs.append(lib_name)\n if lib_name in ext.libraries:\n # XXX: how to determine if c_libraries contain\n # fortran compiled sources?\n c_libraries.extend(build_info.get('libraries',[]))\n c_library_dirs.extend(build_info.get('library_dirs',[]))\n for l in ext.libraries:\n if l in f_libs:\n use_fortran_linker = 1\n break\n\n # Always use system linker when using MSVC compiler.\n if self.compiler.compiler_type=='msvc' and use_fortran_linker:\n c_libraries.extend(self.fcompiler.libraries)\n c_library_dirs.extend(self.fcompiler.library_dirs)\n use_fortran_linker = 0\n\n if use_fortran_linker:\n if cxx_sources:\n # XXX: Which linker should be used, Fortran or C++?\n log.warn('mixing Fortran and C++ is untested')\n link = self.fcompiler.link_shared_object\n language = ext.language or self.fcompiler.detect_language(f_sources)\n else:\n link = self.compiler.link_shared_object\n if sys.version[:3]>='2.3':\n language = ext.language or self.compiler.detect_language(sources)\n else:\n language = ext.language\n if cxx_sources:\n self.compiler.linker_so[0] = self.compiler.compiler_cxx[0]\n\n if sys.version[:3]>='2.3':\n kws = {'target_lang':language}\n else:\n kws = {}\n\n link(objects, ext_filename,\n libraries=self.get_libraries(ext) + c_libraries,\n library_dirs=ext.library_dirs + c_library_dirs,\n runtime_library_dirs=ext.runtime_library_dirs,\n extra_postargs=extra_args,\n 
export_symbols=self.get_export_symbols(ext),\n debug=self.debug,\n build_temp=self.build_temp,**kws)\n\n try:\n self.compiler.linker_so[0] = old_linker_so_0\n except:\n pass\n\n return\n\n def get_source_files (self):\n self.check_extensions_list(self.extensions)\n filenames = []\n def visit_func(filenames,dirname,names):\n if os.path.basename(dirname) in ['CVS','.svn']:\n names[:] = []\n return\n for name in names:\n if name[-1] in \"~#\":\n continue\n fullname = os.path.join(dirname,name)\n if os.path.isfile(fullname):\n filenames.append(fullname)\n # Get sources and any include files in the same directory.\n for ext in self.extensions:\n sources = filter(lambda s:type(s) is StringType,ext.sources)\n filenames.extend(sources)\n filenames.extend(get_headers(get_directories(sources)))\n for d in ext.depends:\n if is_local_src_dir(d):\n os.path.walk(d,visit_func,filenames)\n elif os.path.isfile(d):\n filenames.append(d)\n return filenames\n\n def get_outputs (self):\n self.check_extensions_list(self.extensions)\n\n outputs = []\n for ext in self.extensions:\n if not ext.sources:\n continue\n fullname = self.get_ext_fullname(ext.name)\n outputs.append(os.path.join(self.build_lib,\n self.get_ext_filename(fullname)))\n return outputs\n\ndef is_local_src_dir(directory):\n \"\"\" Return true if directory is local directory.\n \"\"\"\n abs_dir = os.path.abspath(directory)\n c = os.path.commonprefix([os.getcwd(),abs_dir])\n new_dir = abs_dir[len(c):].split(os.sep)\n if new_dir and not new_dir[0]:\n new_dir = new_dir[1:]\n if new_dir and new_dir[0]=='build':\n return 0\n new_dir = os.sep.join(new_dir)\n return os.path.isdir(new_dir)\n", "methods": [ { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_ext.py", "nloc": 5, "complexity": 1, "token_count": 22, "parameters": [ "self" ], "start_line": 30, "end_line": 34, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "finalize_options", 
"long_name": "finalize_options( self )", "filename": "build_ext.py", "nloc": 7, "complexity": 2, "token_count": 36, "parameters": [ "self" ], "start_line": 36, "end_line": 42, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_ext.py", "nloc": 50, "complexity": 14, "token_count": 304, "parameters": [ "self" ], "start_line": 44, "end_line": 110, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 67, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources )", "filename": "build_ext.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self", "sources" ], "start_line": 112, "end_line": 114, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "build_extension", "long_name": "build_extension( self , ext )", "filename": "build_ext.py", "nloc": 175, "complexity": 47, "token_count": 1129, "parameters": [ "self", "ext" ], "start_line": 116, "end_line": 327, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 212, "top_nesting_level": 1 }, { "name": "get_source_files.visit_func", "long_name": "get_source_files.visit_func( filenames , dirname , names )", "filename": "build_ext.py", "nloc": 10, "complexity": 5, "token_count": 76, "parameters": [ "filenames", "dirname", "names" ], "start_line": 332, "end_line": 341, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 2 }, { "name": "get_source_files", "long_name": "get_source_files( self )", "filename": "build_ext.py", "nloc": 14, "complexity": 5, "token_count": 105, "parameters": [ "self" ], "start_line": 329, "end_line": 352, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 24, "top_nesting_level": 1 }, { "name": "get_outputs", "long_name": "get_outputs( self )", "filename": "build_ext.py", "nloc": 10, "complexity": 3, "token_count": 65, "parameters": [ "self" ], 
"start_line": 354, "end_line": 364, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "is_local_src_dir", "long_name": "is_local_src_dir( directory )", "filename": "build_ext.py", "nloc": 10, "complexity": 5, "token_count": 98, "parameters": [ "directory" ], "start_line": 366, "end_line": 377, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 } ], "methods_before": [ { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_ext.py", "nloc": 5, "complexity": 1, "token_count": 22, "parameters": [ "self" ], "start_line": 31, "end_line": 35, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_ext.py", "nloc": 7, "complexity": 2, "token_count": 36, "parameters": [ "self" ], "start_line": 37, "end_line": 43, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_ext.py", "nloc": 50, "complexity": 14, "token_count": 304, "parameters": [ "self" ], "start_line": 45, "end_line": 111, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 67, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources )", "filename": "build_ext.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self", "sources" ], "start_line": 113, "end_line": 115, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "build_extension", "long_name": "build_extension( self , ext )", "filename": "build_ext.py", "nloc": 175, "complexity": 47, "token_count": 1129, "parameters": [ "self", "ext" ], "start_line": 117, "end_line": 328, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 212, "top_nesting_level": 1 }, { "name": "get_source_files.visit_func", "long_name": 
"get_source_files.visit_func( filenames , dirname , names )", "filename": "build_ext.py", "nloc": 10, "complexity": 5, "token_count": 76, "parameters": [ "filenames", "dirname", "names" ], "start_line": 333, "end_line": 342, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 2 }, { "name": "get_source_files", "long_name": "get_source_files( self )", "filename": "build_ext.py", "nloc": 14, "complexity": 5, "token_count": 105, "parameters": [ "self" ], "start_line": 330, "end_line": 353, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 24, "top_nesting_level": 1 }, { "name": "get_outputs", "long_name": "get_outputs( self )", "filename": "build_ext.py", "nloc": 10, "complexity": 3, "token_count": 65, "parameters": [ "self" ], "start_line": 355, "end_line": 365, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "is_local_src_dir", "long_name": "is_local_src_dir( directory )", "filename": "build_ext.py", "nloc": 10, "complexity": 5, "token_count": 98, "parameters": [ "directory" ], "start_line": 367, "end_line": 378, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 } ], "changed_methods": [], "nloc": 304, "complexity": 83, "token_count": 1950, "diff_parsed": { "added": [], "deleted": [ "" ] } }, { "old_path": "scipy_distutils/command/build_src.py", "new_path": "scipy_distutils/command/build_src.py", "filename": "build_src.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -3,6 +3,7 @@\n \n import os\n import re\n+import copy\n \n from distutils.cmd import Command\n from distutils.command import build_ext, build_py\n@@ -13,6 +14,7 @@\n from scipy_distutils.misc_util import fortran_ext_match, all_strings, dot_join\n from scipy_distutils.from_template import process_file\n from scipy_distutils.extension import Extension\n+from scipy_distutils.system_info import get_info, dict_append\n \n _split_ext_template = '''\n import os\n@@ -48,6 
+50,8 @@\n del ___a, ___m\n '''\n \n+def _get_constructor_argnames(obj):\n+ return obj.__init__.im_func.func_code.co_varnames[1:]\n \n class build_src(build_ext.build_ext):\n \n@@ -120,13 +124,35 @@ def run(self):\n return\n if self.backends is not None:\n self.backend_split()\n+ else:\n+ self.apply_backend_info('numeric')\n self.build_sources()\n return\n \n+ def apply_backend_info(self,backend):\n+ log.info('applying backend (%s) info to extensions'\\\n+ % (backend))\n+ backend_info = get_info(backend,notfound_action=1)\n+ if not backend_info:\n+ return\n+ extensions = []\n+ for ext in self.extensions:\n+ ext_args = {}\n+ for a in _get_constructor_argnames(ext):\n+ ext_args[a] = copy.copy(getattr(ext,a))\n+ dict_append(ext_args,**backend_info)\n+ new_ext = Extension(**ext_args)\n+ extensions.append(new_ext) \n+ self.extensions[:] = extensions\n+ return\n+\n def backend_split(self):\n+ backends = self.backends.split(',')\n+ if len(backends)==1:\n+ self.apply_backend_info(backends[0])\n+ return\n log.info('splitting extensions for backends: %s' % (self.backends))\n extensions = []\n- backends = self.backends.split(',')\n for ext in self.extensions:\n name = ext.name.split('.')[-1]\n fullname = self.get_ext_fullname(ext.name)\n@@ -162,31 +188,23 @@ def func_init(extension, src_dir):\n ext.sources = [func]\n extensions.append(ext)\n self.extensions[:] = extensions\n+ return\n \n def split_extension(self, ext, backend):\n fullname = self.get_ext_fullname(ext.name)\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n name = modpath[-1]\n- macros = []\n- macros.append((backend.upper(),None))\n- new_ext = Extension(name = dot_join(package,'_%s.%s' % (backend,name)),\n- sources = ext.sources,\n- include_dirs = ext.include_dirs,\n- define_macros = ext.define_macros + macros,\n- undef_macros = ext.undef_macros,\n- library_dirs = ext.library_dirs,\n- libraries = ext.libraries,\n- runtime_library_dirs = ext.runtime_library_dirs,\n- extra_objects = 
ext.extra_objects,\n- extra_compile_args = ext.extra_compile_args,\n- extra_link_args = ext.extra_link_args,\n- export_symbols = ext.export_symbols,\n- depends = ext.depends,\n- language = ext.language,\n- f2py_options = ext.f2py_options,\n- module_dirs = ext.module_dirs\n- )\n+ ext_args = {}\n+ for a in _get_constructor_argnames(ext):\n+ if a=='name':\n+ ext_args[a] = dot_join(package,'_%s.%s' % (backend,name))\n+ else:\n+ ext_args[a] = copy.copy(getattr(ext,a))\n+ backend_info = get_info(backend,notfound_action=1)\n+ if backend_info:\n+ dict_append(ext_args,**backend_info)\n+ new_ext = Extension(**ext_args)\n new_ext.backend = backend\n return new_ext\n \n", "added_lines": 38, "deleted_lines": 20, "source_code": "\"\"\" Build swig, f2py, weave, sources.\n\"\"\"\n\nimport os\nimport re\nimport copy\n\nfrom distutils.cmd import Command\nfrom distutils.command import build_ext, build_py\nfrom distutils.util import convert_path\nfrom distutils.dep_util import newer_group, newer\n\nfrom scipy_distutils import log\nfrom scipy_distutils.misc_util import fortran_ext_match, all_strings, dot_join\nfrom scipy_distutils.from_template import process_file\nfrom scipy_distutils.extension import Extension\nfrom scipy_distutils.system_info import get_info, dict_append\n\n_split_ext_template = '''\nimport os\nimport sys\n\n_which = None, None\n_backends = %(backends)r\nif hasattr(sys, \"argv\"):\n i = -1\n for a in sys.argv:\n i += 1\n if a.lower()[2:] in _backends: \n _which = a.lower()[2:], \"command line\"\n del sys.argv[i]\n os.environ[_which[0].upper()] = _which[0]\n break\n del a\n\nif _which[0] is None:\n for b in _backends:\n if os.environ.get(b.upper(),None):\n _which = b, \"environment var\"\n break\n del b\n\nif _which[0] is None:\n _which = _backends[0], \"defaulted\"\n\nexec \"import _\" + _which[0] + \".%(name)s as ___m\"\nfor ___a in dir(___m):\n exec ___a + \" = getattr(___m,___a)\"\nelse:\n del ___a, ___m\n'''\n\ndef _get_constructor_argnames(obj):\n return 
obj.__init__.im_func.func_code.co_varnames[1:]\n\nclass build_src(build_ext.build_ext):\n\n description = \"build sources from SWIG, F2PY files or a function\"\n\n user_options = [\n ('build-src=', 'd', \"directory to \\\"build\\\" sources to\"),\n ('f2pyflags=', None, \"additonal flags to f2py\"),\n ('swigflags=', None, \"additional flags to swig\"),\n ('force', 'f', \"forcibly build everything (ignore file timestamps)\"),\n ('inplace', 'i',\n \"ignore build-lib and put compiled extensions into the source \" +\n \"directory alongside your pure Python modules\"),\n ]\n\n boolean_options = ['force','inplace']\n\n help_options = []\n\n def initialize_options(self):\n self.extensions = None\n self.package = None\n self.py_modules = None\n self.build_src = None\n self.build_lib = None\n self.build_base = None\n self.force = None\n self.inplace = None\n self.package_dir = None\n self.f2pyflags = None\n self.swigflags = None\n self.backends = None\n return\n\n def finalize_options(self):\n self.set_undefined_options('build',\n ('build_base', 'build_base'),\n ('build_lib', 'build_lib'),\n ('force', 'force'))\n if self.package is None:\n self.package = self.distribution.ext_package\n self.extensions = self.distribution.ext_modules\n self.libraries = self.distribution.libraries or []\n self.py_modules = self.distribution.py_modules\n if self.build_src is None:\n self.build_src = os.path.join(self.build_base, 'src')\n if self.inplace is None:\n build_ext = self.get_finalized_command('build_ext')\n self.inplace = build_ext.inplace\n if self.backends is None:\n build_ext = self.get_finalized_command('build_ext')\n self.backends = build_ext.backends\n\n # py_modules is used in build_py.find_package_modules\n self.py_modules = {}\n\n if self.f2pyflags is None:\n self.f2pyflags = []\n else:\n self.f2pyflags = self.f2pyflags.split() # XXX spaces??\n\n if self.swigflags is None:\n self.swigflags = []\n else:\n self.swigflags = self.swigflags.split() # XXX spaces??\n return\n\n def 
run(self):\n if not (self.extensions or self.libraries):\n return\n if self.backends is not None:\n self.backend_split()\n else:\n self.apply_backend_info('numeric')\n self.build_sources()\n return\n\n def apply_backend_info(self,backend):\n log.info('applying backend (%s) info to extensions'\\\n % (backend))\n backend_info = get_info(backend,notfound_action=1)\n if not backend_info:\n return\n extensions = []\n for ext in self.extensions:\n ext_args = {}\n for a in _get_constructor_argnames(ext):\n ext_args[a] = copy.copy(getattr(ext,a))\n dict_append(ext_args,**backend_info)\n new_ext = Extension(**ext_args)\n extensions.append(new_ext) \n self.extensions[:] = extensions\n return\n\n def backend_split(self):\n backends = self.backends.split(',')\n if len(backends)==1:\n self.apply_backend_info(backends[0])\n return\n log.info('splitting extensions for backends: %s' % (self.backends))\n extensions = []\n for ext in self.extensions:\n name = ext.name.split('.')[-1]\n fullname = self.get_ext_fullname(ext.name)\n def func(extension, src_dir,\n name=name,\n fullname=fullname,\n backends=backends):\n source = os.path.join(os.path.dirname(src_dir),name+'.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.write(_split_ext_template \\\n % {'name':name,'fullname':fullname,\n 'backends':backends})\n f.close()\n return [ source ]\n def func_init(extension, src_dir):\n source = os.path.join(src_dir,'__init__.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.close()\n return [source]\n for b in backends:\n new_ext = self.split_extension(ext,b)\n new_ext.sources.append(func_init)\n extensions.append(new_ext)\n\n new_package = dot_join(*(ext.name.split('.')[:-1]+['_'+b]))\n new_package_dir = os.path.join(*([self.build_src]+ext.name.split('.')[:-1]+['_'+b]))\n if new_package not in self.distribution.packages:\n self.distribution.packages.append(new_package)\n self.distribution.package_dir[new_package] = new_package_dir\n\n ext.sources = [func]\n 
extensions.append(ext)\n self.extensions[:] = extensions\n return\n\n def split_extension(self, ext, backend):\n fullname = self.get_ext_fullname(ext.name)\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n name = modpath[-1]\n ext_args = {}\n for a in _get_constructor_argnames(ext):\n if a=='name':\n ext_args[a] = dot_join(package,'_%s.%s' % (backend,name))\n else:\n ext_args[a] = copy.copy(getattr(ext,a))\n backend_info = get_info(backend,notfound_action=1)\n if backend_info:\n dict_append(ext_args,**backend_info)\n new_ext = Extension(**ext_args)\n new_ext.backend = backend\n return new_ext\n \n def build_sources(self):\n self.check_extensions_list(self.extensions)\n\n for ext in self.extensions:\n self.build_extension_sources(ext)\n\n for libname_info in self.libraries:\n self.build_library_sources(*libname_info)\n\n return\n\n def build_library_sources(self, lib_name, build_info):\n sources = list(build_info.get('sources',[]))\n\n if not sources:\n return\n\n log.info('building library \"%s\" sources' % (lib_name))\n\n sources = self.generate_sources(sources, (lib_name, build_info))\n\n build_info['sources'] = sources\n return\n\n def build_extension_sources(self, ext):\n sources = list(ext.sources)\n\n log.info('building extension \"%s\" sources' % (ext.name))\n\n fullname = self.get_ext_fullname(ext.name)\n\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n\n if self.inplace:\n build_py = self.get_finalized_command('build_py')\n self.ext_target_dir = build_py.get_package_dir(package)\n\n sources = self.generate_sources(sources, ext)\n\n sources = self.template_sources(sources, ext)\n \n sources = self.swig_sources(sources, ext)\n\n sources = self.f2py_sources(sources, ext)\n\n sources, py_files = self.filter_py_files(sources)\n\n if not self.py_modules.has_key(package):\n self.py_modules[package] = []\n modules = []\n for f in py_files:\n module = os.path.splitext(os.path.basename(f))[0]\n modules.append((package, 
module, f))\n self.py_modules[package] += modules\n\n ext.sources = sources\n return\n\n def generate_sources(self, sources, extension):\n new_sources = []\n func_sources = []\n for source in sources:\n if type(source) is type(''):\n new_sources.append(source)\n else:\n func_sources.append(source)\n if not func_sources:\n return new_sources\n if self.inplace:\n build_dir = self.ext_target_dir\n else:\n if type(extension) is type(()):\n name = extension[0]\n else:\n name = extension.name\n build_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n self.mkpath(build_dir)\n for func in func_sources:\n source = func(extension, build_dir)\n if type(source) is type([]):\n [log.info(\" adding '%s' to sources.\" % (s)) for s in source]\n new_sources.extend(source)\n else:\n log.info(\" adding '%s' to sources.\" % (source))\n new_sources.append(source)\n return new_sources\n\n def filter_py_files(self, sources):\n new_sources = []\n py_files = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext=='.py': \n py_files.append(source)\n else:\n new_sources.append(source)\n return new_sources, py_files\n\n def template_sources(self, sources, extension):\n new_sources = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.src': # Template file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n self.mkpath(target_dir)\n target_file = os.path.join(target_dir,os.path.basename(base))\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file)):\n log.info(\"from_template:> %s\" % (target_file))\n outstr = process_file(source)\n fid = open(target_file,'w')\n fid.write(outstr)\n fid.close()\n new_sources.append(target_file)\n else:\n new_sources.append(source)\n return new_sources \n \n def f2py_sources(self, sources, extension):\n new_sources = []\n f2py_sources = []\n f_sources = []\n f2py_targets = 
{}\n target_dirs = []\n ext_name = extension.name.split('.')[-1]\n skip_f2py = 0\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.pyf': # F2PY interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n if os.path.isfile(source):\n name = get_f2py_modulename(source)\n assert name==ext_name,'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name`\n target_file = os.path.join(target_dir,name+'module.c')\n else:\n log.debug(' source %s does not exist: skipping f2py\\'ing.' \\\n % (source))\n name = ext_name\n skip_f2py = 1\n target_file = os.path.join(target_dir,name+'module.c')\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %smodule.c was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = os.path.join(target_dir,name+'module.c')\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' 
\\\n % (target_file))\n target_dirs.append(target_dir)\n f2py_sources.append(source)\n f2py_targets[source] = target_file\n new_sources.append(target_file)\n elif fortran_ext_match(ext):\n f_sources.append(source)\n else:\n new_sources.append(source)\n\n if not (f2py_sources or f_sources):\n return new_sources\n\n map(self.mkpath, target_dirs)\n\n f2py_options = extension.f2py_options + self.f2pyflags\n if f2py_sources:\n assert len(f2py_sources)==1,\\\n 'only one .pyf file is allowed per extension module but got'\\\n ' more:'+`f2py_sources`\n source = f2py_sources[0]\n target_file = f2py_targets[source]\n target_dir = os.path.dirname(target_file) or '.'\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file,'newer')) \\\n and not skip_f2py:\n log.info(\"f2py: %s\" % (source))\n import f2py2e\n f2py2e.run_main(f2py_options + ['--build-dir',target_dir,source])\n else:\n log.debug(\" skipping '%s' f2py interface (up-to-date)\" % (source))\n else:\n #XXX TODO: --inplace support for sdist command\n if type(extension) is type(()): name = extension[0]\n else: name = extension.name\n target_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n target_file = os.path.join(target_dir,ext_name + 'module.c')\n new_sources.append(target_file)\n depends = f_sources + extension.depends\n if (self.force or newer_group(depends, target_file, 'newer')) \\\n and not skip_f2py:\n import f2py2e\n log.info(\"f2py:> %s\" % (target_file))\n self.mkpath(target_dir)\n f2py2e.run_main(f2py_options + ['--lower',\n '--build-dir',target_dir]+\\\n ['-m',ext_name]+f_sources)\n else:\n log.debug(\" skipping f2py fortran files for '%s' (up-to-date)\"\\\n % (target_file))\n\n assert os.path.isfile(target_file),`target_file`+' missing'\n\n target_c = os.path.join(self.build_src,'fortranobject.c')\n target_h = os.path.join(self.build_src,'fortranobject.h')\n log.info(\" adding '%s' to sources.\" % (target_c))\n new_sources.append(target_c)\n if 
self.build_src not in extension.include_dirs:\n log.info(\" adding '%s' to include_dirs.\" \\\n % (self.build_src))\n extension.include_dirs.append(self.build_src)\n\n if not skip_f2py:\n import f2py2e\n d = os.path.dirname(f2py2e.__file__)\n source_c = os.path.join(d,'src','fortranobject.c')\n source_h = os.path.join(d,'src','fortranobject.h')\n if newer(source_c,target_c) or newer(source_h,target_h):\n self.mkpath(os.path.dirname(target_c))\n self.copy_file(source_c,target_c)\n self.copy_file(source_h,target_h)\n else:\n assert os.path.isfile(target_c),`target_c` + ' missing'\n assert os.path.isfile(target_h),`target_h` + ' missing'\n \n for name_ext in ['-f2pywrappers.f','-f2pywrappers2.f90']:\n filename = os.path.join(target_dir,ext_name + name_ext)\n if os.path.isfile(filename):\n log.info(\" adding '%s' to sources.\" % (filename))\n f_sources.append(filename)\n\n return new_sources + f_sources\n\n def swig_sources(self, sources, extension):\n # Assuming SWIG 1.3.14 or later. See compatibility note in\n # http://www.swig.org/Doc1.3/Python.html#Python_nn6\n\n new_sources = []\n swig_sources = []\n swig_targets = {}\n target_dirs = []\n py_files = [] # swig generated .py files\n target_ext = '.c'\n typ = None\n is_cpp = 0\n skip_swig = 0\n ext_name = extension.name.split('.')[-1]\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.i': # SWIG interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n py_target_dir = self.ext_target_dir\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n py_target_dir = target_dir\n if os.path.isfile(source):\n name = get_swig_modulename(source)\n assert name==ext_name[1:],'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name[1:]`\n if typ is None:\n typ = get_swig_target(source)\n is_cpp = typ=='c++'\n if is_cpp:\n target_ext = '.cpp'\n else:\n assert typ == get_swig_target(source),`typ`\n target_file = 
os.path.join(target_dir,'%s_wrap%s' \\\n % (name, target_ext))\n else:\n log.debug(' source %s does not exist: skipping swig\\'ing.' \\\n % (source))\n name = ext_name[1:]\n skip_swig = 1\n target_file = _find_swig_target(target_dir, name)\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %s_wrap.{c,cpp} was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = _find_swig_target(target_dir, name)\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' \\\n % (target_file))\n target_dirs.append(target_dir)\n new_sources.append(target_file)\n py_files.append(os.path.join(py_target_dir, name+'.py'))\n swig_sources.append(source)\n swig_targets[source] = new_sources[-1]\n else:\n new_sources.append(source)\n\n if not swig_sources:\n return new_sources\n\n if skip_swig:\n return new_sources + py_files\n\n map(self.mkpath, target_dirs)\n swig = self.find_swig()\n swig_cmd = [swig, \"-python\"]\n if is_cpp:\n swig_cmd.append('-c++')\n for d in extension.include_dirs:\n swig_cmd.append('-I'+d)\n for source in swig_sources:\n target = swig_targets[source]\n depends = [source] + extension.depends\n if self.force or newer_group(depends, target, 'newer'):\n log.info(\"%s: %s\" % (os.path.basename(swig) \\\n + (is_cpp and '++' or ''), source))\n self.spawn(swig_cmd + self.swigflags \\\n + [\"-o\", target, '-outdir', py_target_dir, source])\n else:\n log.debug(\" skipping '%s' swig interface (up-to-date)\" \\\n % (source))\n\n return new_sources + py_files\n\ndef appendpath(prefix,path):\n if os.path.isabs(path):\n absprefix = os.path.abspath(prefix)\n d = os.path.commonprefix([absprefix,path])\n subpath = path[len(d):]\n assert not os.path.isabs(subpath),`subpath`\n return os.path.normpath(os.path.join(prefix,subpath))\n return os.path.normpath(os.path.join(prefix, path))\n\n#### SWIG related 
auxiliary functions ####\n_swig_module_name_match = re.compile(r'\\s*%module\\s*(?P[\\w_]+)',\n re.I).match\n_has_c_header = re.compile(r'-[*]-\\s*c\\s*-[*]-',re.I).search\n_has_cpp_header = re.compile(r'-[*]-\\s*c[+][+]\\s*-[*]-',re.I).search\n\ndef get_swig_target(source):\n f = open(source,'r')\n result = 'c'\n line = f.readline()\n if _has_cpp_header(line):\n result = 'c++'\n if _has_c_header(line):\n result = 'c'\n f.close()\n return result\n\ndef get_swig_modulename(source):\n f = open(source,'r')\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _swig_module_name_match(line)\n if m:\n name = m.group('name')\n break\n f.close()\n return name\n\ndef _find_swig_target(target_dir,name):\n for ext in ['.cpp','.c']:\n target = os.path.join(target_dir,'%s_wrap%s' % (name, ext))\n if os.path.isfile(target):\n break\n return target\n\n#### F2PY related auxiliary functions ####\n\n_f2py_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]+)',\n re.I).match\n_f2py_user_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]*?'\\\n '__user__[\\w_]*)',re.I).match\n\ndef get_f2py_modulename(source):\n name = None\n f = open(source)\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _f2py_module_name_match(line)\n if m:\n if _f2py_user_module_name_match(line): # skip *__user__* names\n continue\n name = m.group('name')\n break\n f.close()\n return name\n\n##########################################\n", "source_code_before": "\"\"\" Build swig, f2py, weave, sources.\n\"\"\"\n\nimport os\nimport re\n\nfrom distutils.cmd import Command\nfrom distutils.command import build_ext, build_py\nfrom distutils.util import convert_path\nfrom distutils.dep_util import newer_group, newer\n\nfrom scipy_distutils import log\nfrom scipy_distutils.misc_util import fortran_ext_match, all_strings, dot_join\nfrom scipy_distutils.from_template import process_file\nfrom scipy_distutils.extension 
import Extension\n\n_split_ext_template = '''\nimport os\nimport sys\n\n_which = None, None\n_backends = %(backends)r\nif hasattr(sys, \"argv\"):\n i = -1\n for a in sys.argv:\n i += 1\n if a.lower()[2:] in _backends: \n _which = a.lower()[2:], \"command line\"\n del sys.argv[i]\n os.environ[_which[0].upper()] = _which[0]\n break\n del a\n\nif _which[0] is None:\n for b in _backends:\n if os.environ.get(b.upper(),None):\n _which = b, \"environment var\"\n break\n del b\n\nif _which[0] is None:\n _which = _backends[0], \"defaulted\"\n\nexec \"import _\" + _which[0] + \".%(name)s as ___m\"\nfor ___a in dir(___m):\n exec ___a + \" = getattr(___m,___a)\"\nelse:\n del ___a, ___m\n'''\n\n\nclass build_src(build_ext.build_ext):\n\n description = \"build sources from SWIG, F2PY files or a function\"\n\n user_options = [\n ('build-src=', 'd', \"directory to \\\"build\\\" sources to\"),\n ('f2pyflags=', None, \"additonal flags to f2py\"),\n ('swigflags=', None, \"additional flags to swig\"),\n ('force', 'f', \"forcibly build everything (ignore file timestamps)\"),\n ('inplace', 'i',\n \"ignore build-lib and put compiled extensions into the source \" +\n \"directory alongside your pure Python modules\"),\n ]\n\n boolean_options = ['force','inplace']\n\n help_options = []\n\n def initialize_options(self):\n self.extensions = None\n self.package = None\n self.py_modules = None\n self.build_src = None\n self.build_lib = None\n self.build_base = None\n self.force = None\n self.inplace = None\n self.package_dir = None\n self.f2pyflags = None\n self.swigflags = None\n self.backends = None\n return\n\n def finalize_options(self):\n self.set_undefined_options('build',\n ('build_base', 'build_base'),\n ('build_lib', 'build_lib'),\n ('force', 'force'))\n if self.package is None:\n self.package = self.distribution.ext_package\n self.extensions = self.distribution.ext_modules\n self.libraries = self.distribution.libraries or []\n self.py_modules = self.distribution.py_modules\n if 
self.build_src is None:\n self.build_src = os.path.join(self.build_base, 'src')\n if self.inplace is None:\n build_ext = self.get_finalized_command('build_ext')\n self.inplace = build_ext.inplace\n if self.backends is None:\n build_ext = self.get_finalized_command('build_ext')\n self.backends = build_ext.backends\n\n # py_modules is used in build_py.find_package_modules\n self.py_modules = {}\n\n if self.f2pyflags is None:\n self.f2pyflags = []\n else:\n self.f2pyflags = self.f2pyflags.split() # XXX spaces??\n\n if self.swigflags is None:\n self.swigflags = []\n else:\n self.swigflags = self.swigflags.split() # XXX spaces??\n return\n\n def run(self):\n if not (self.extensions or self.libraries):\n return\n if self.backends is not None:\n self.backend_split()\n self.build_sources()\n return\n\n def backend_split(self):\n log.info('splitting extensions for backends: %s' % (self.backends))\n extensions = []\n backends = self.backends.split(',')\n for ext in self.extensions:\n name = ext.name.split('.')[-1]\n fullname = self.get_ext_fullname(ext.name)\n def func(extension, src_dir,\n name=name,\n fullname=fullname,\n backends=backends):\n source = os.path.join(os.path.dirname(src_dir),name+'.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.write(_split_ext_template \\\n % {'name':name,'fullname':fullname,\n 'backends':backends})\n f.close()\n return [ source ]\n def func_init(extension, src_dir):\n source = os.path.join(src_dir,'__init__.py')\n if newer(__file__, source):\n f = open(source,'w')\n f.close()\n return [source]\n for b in backends:\n new_ext = self.split_extension(ext,b)\n new_ext.sources.append(func_init)\n extensions.append(new_ext)\n\n new_package = dot_join(*(ext.name.split('.')[:-1]+['_'+b]))\n new_package_dir = os.path.join(*([self.build_src]+ext.name.split('.')[:-1]+['_'+b]))\n if new_package not in self.distribution.packages:\n self.distribution.packages.append(new_package)\n self.distribution.package_dir[new_package] = 
new_package_dir\n\n ext.sources = [func]\n extensions.append(ext)\n self.extensions[:] = extensions\n\n def split_extension(self, ext, backend):\n fullname = self.get_ext_fullname(ext.name)\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n name = modpath[-1]\n macros = []\n macros.append((backend.upper(),None))\n new_ext = Extension(name = dot_join(package,'_%s.%s' % (backend,name)),\n sources = ext.sources,\n include_dirs = ext.include_dirs,\n define_macros = ext.define_macros + macros,\n undef_macros = ext.undef_macros,\n library_dirs = ext.library_dirs,\n libraries = ext.libraries,\n runtime_library_dirs = ext.runtime_library_dirs,\n extra_objects = ext.extra_objects,\n extra_compile_args = ext.extra_compile_args,\n extra_link_args = ext.extra_link_args,\n export_symbols = ext.export_symbols,\n depends = ext.depends,\n language = ext.language,\n f2py_options = ext.f2py_options,\n module_dirs = ext.module_dirs\n )\n new_ext.backend = backend\n return new_ext\n \n def build_sources(self):\n self.check_extensions_list(self.extensions)\n\n for ext in self.extensions:\n self.build_extension_sources(ext)\n\n for libname_info in self.libraries:\n self.build_library_sources(*libname_info)\n\n return\n\n def build_library_sources(self, lib_name, build_info):\n sources = list(build_info.get('sources',[]))\n\n if not sources:\n return\n\n log.info('building library \"%s\" sources' % (lib_name))\n\n sources = self.generate_sources(sources, (lib_name, build_info))\n\n build_info['sources'] = sources\n return\n\n def build_extension_sources(self, ext):\n sources = list(ext.sources)\n\n log.info('building extension \"%s\" sources' % (ext.name))\n\n fullname = self.get_ext_fullname(ext.name)\n\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n\n if self.inplace:\n build_py = self.get_finalized_command('build_py')\n self.ext_target_dir = build_py.get_package_dir(package)\n\n sources = self.generate_sources(sources, ext)\n\n sources = 
self.template_sources(sources, ext)\n \n sources = self.swig_sources(sources, ext)\n\n sources = self.f2py_sources(sources, ext)\n\n sources, py_files = self.filter_py_files(sources)\n\n if not self.py_modules.has_key(package):\n self.py_modules[package] = []\n modules = []\n for f in py_files:\n module = os.path.splitext(os.path.basename(f))[0]\n modules.append((package, module, f))\n self.py_modules[package] += modules\n\n ext.sources = sources\n return\n\n def generate_sources(self, sources, extension):\n new_sources = []\n func_sources = []\n for source in sources:\n if type(source) is type(''):\n new_sources.append(source)\n else:\n func_sources.append(source)\n if not func_sources:\n return new_sources\n if self.inplace:\n build_dir = self.ext_target_dir\n else:\n if type(extension) is type(()):\n name = extension[0]\n else:\n name = extension.name\n build_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n self.mkpath(build_dir)\n for func in func_sources:\n source = func(extension, build_dir)\n if type(source) is type([]):\n [log.info(\" adding '%s' to sources.\" % (s)) for s in source]\n new_sources.extend(source)\n else:\n log.info(\" adding '%s' to sources.\" % (source))\n new_sources.append(source)\n return new_sources\n\n def filter_py_files(self, sources):\n new_sources = []\n py_files = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext=='.py': \n py_files.append(source)\n else:\n new_sources.append(source)\n return new_sources, py_files\n\n def template_sources(self, sources, extension):\n new_sources = []\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.src': # Template file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n self.mkpath(target_dir)\n target_file = os.path.join(target_dir,os.path.basename(base))\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, 
target_file)):\n log.info(\"from_template:> %s\" % (target_file))\n outstr = process_file(source)\n fid = open(target_file,'w')\n fid.write(outstr)\n fid.close()\n new_sources.append(target_file)\n else:\n new_sources.append(source)\n return new_sources \n \n def f2py_sources(self, sources, extension):\n new_sources = []\n f2py_sources = []\n f_sources = []\n f2py_targets = {}\n target_dirs = []\n ext_name = extension.name.split('.')[-1]\n skip_f2py = 0\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.pyf': # F2PY interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n if os.path.isfile(source):\n name = get_f2py_modulename(source)\n assert name==ext_name,'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name`\n target_file = os.path.join(target_dir,name+'module.c')\n else:\n log.debug(' source %s does not exist: skipping f2py\\'ing.' \\\n % (source))\n name = ext_name\n skip_f2py = 1\n target_file = os.path.join(target_dir,name+'module.c')\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %smodule.c was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = os.path.join(target_dir,name+'module.c')\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' 
\\\n % (target_file))\n target_dirs.append(target_dir)\n f2py_sources.append(source)\n f2py_targets[source] = target_file\n new_sources.append(target_file)\n elif fortran_ext_match(ext):\n f_sources.append(source)\n else:\n new_sources.append(source)\n\n if not (f2py_sources or f_sources):\n return new_sources\n\n map(self.mkpath, target_dirs)\n\n f2py_options = extension.f2py_options + self.f2pyflags\n if f2py_sources:\n assert len(f2py_sources)==1,\\\n 'only one .pyf file is allowed per extension module but got'\\\n ' more:'+`f2py_sources`\n source = f2py_sources[0]\n target_file = f2py_targets[source]\n target_dir = os.path.dirname(target_file) or '.'\n depends = [source] + extension.depends\n if (self.force or newer_group(depends, target_file,'newer')) \\\n and not skip_f2py:\n log.info(\"f2py: %s\" % (source))\n import f2py2e\n f2py2e.run_main(f2py_options + ['--build-dir',target_dir,source])\n else:\n log.debug(\" skipping '%s' f2py interface (up-to-date)\" % (source))\n else:\n #XXX TODO: --inplace support for sdist command\n if type(extension) is type(()): name = extension[0]\n else: name = extension.name\n target_dir = os.path.join(*([self.build_src]\\\n +name.split('.')[:-1]))\n target_file = os.path.join(target_dir,ext_name + 'module.c')\n new_sources.append(target_file)\n depends = f_sources + extension.depends\n if (self.force or newer_group(depends, target_file, 'newer')) \\\n and not skip_f2py:\n import f2py2e\n log.info(\"f2py:> %s\" % (target_file))\n self.mkpath(target_dir)\n f2py2e.run_main(f2py_options + ['--lower',\n '--build-dir',target_dir]+\\\n ['-m',ext_name]+f_sources)\n else:\n log.debug(\" skipping f2py fortran files for '%s' (up-to-date)\"\\\n % (target_file))\n\n assert os.path.isfile(target_file),`target_file`+' missing'\n\n target_c = os.path.join(self.build_src,'fortranobject.c')\n target_h = os.path.join(self.build_src,'fortranobject.h')\n log.info(\" adding '%s' to sources.\" % (target_c))\n new_sources.append(target_c)\n if 
self.build_src not in extension.include_dirs:\n log.info(\" adding '%s' to include_dirs.\" \\\n % (self.build_src))\n extension.include_dirs.append(self.build_src)\n\n if not skip_f2py:\n import f2py2e\n d = os.path.dirname(f2py2e.__file__)\n source_c = os.path.join(d,'src','fortranobject.c')\n source_h = os.path.join(d,'src','fortranobject.h')\n if newer(source_c,target_c) or newer(source_h,target_h):\n self.mkpath(os.path.dirname(target_c))\n self.copy_file(source_c,target_c)\n self.copy_file(source_h,target_h)\n else:\n assert os.path.isfile(target_c),`target_c` + ' missing'\n assert os.path.isfile(target_h),`target_h` + ' missing'\n \n for name_ext in ['-f2pywrappers.f','-f2pywrappers2.f90']:\n filename = os.path.join(target_dir,ext_name + name_ext)\n if os.path.isfile(filename):\n log.info(\" adding '%s' to sources.\" % (filename))\n f_sources.append(filename)\n\n return new_sources + f_sources\n\n def swig_sources(self, sources, extension):\n # Assuming SWIG 1.3.14 or later. See compatibility note in\n # http://www.swig.org/Doc1.3/Python.html#Python_nn6\n\n new_sources = []\n swig_sources = []\n swig_targets = {}\n target_dirs = []\n py_files = [] # swig generated .py files\n target_ext = '.c'\n typ = None\n is_cpp = 0\n skip_swig = 0\n ext_name = extension.name.split('.')[-1]\n\n for source in sources:\n (base, ext) = os.path.splitext(source)\n if ext == '.i': # SWIG interface file\n if self.inplace:\n target_dir = os.path.dirname(base)\n py_target_dir = self.ext_target_dir\n else:\n target_dir = appendpath(self.build_src, os.path.dirname(base))\n py_target_dir = target_dir\n if os.path.isfile(source):\n name = get_swig_modulename(source)\n assert name==ext_name[1:],'mismatch of extension names: '\\\n +source+' provides'\\\n ' '+`name`+' but expected '+`ext_name[1:]`\n if typ is None:\n typ = get_swig_target(source)\n is_cpp = typ=='c++'\n if is_cpp:\n target_ext = '.cpp'\n else:\n assert typ == get_swig_target(source),`typ`\n target_file = 
os.path.join(target_dir,'%s_wrap%s' \\\n % (name, target_ext))\n else:\n log.debug(' source %s does not exist: skipping swig\\'ing.' \\\n % (source))\n name = ext_name[1:]\n skip_swig = 1\n target_file = _find_swig_target(target_dir, name)\n if not os.path.isfile(target_file):\n log.debug(' target %s does not exist:\\n '\\\n 'Assuming %s_wrap.{c,cpp} was generated with '\\\n '\"build_src --inplace\" command.' \\\n % (target_file, name))\n target_dir = os.path.dirname(base)\n target_file = _find_swig_target(target_dir, name)\n assert os.path.isfile(target_file),`target_file`+' missing'\n log.debug(' Yes! Using %s as up-to-date target.' \\\n % (target_file))\n target_dirs.append(target_dir)\n new_sources.append(target_file)\n py_files.append(os.path.join(py_target_dir, name+'.py'))\n swig_sources.append(source)\n swig_targets[source] = new_sources[-1]\n else:\n new_sources.append(source)\n\n if not swig_sources:\n return new_sources\n\n if skip_swig:\n return new_sources + py_files\n\n map(self.mkpath, target_dirs)\n swig = self.find_swig()\n swig_cmd = [swig, \"-python\"]\n if is_cpp:\n swig_cmd.append('-c++')\n for d in extension.include_dirs:\n swig_cmd.append('-I'+d)\n for source in swig_sources:\n target = swig_targets[source]\n depends = [source] + extension.depends\n if self.force or newer_group(depends, target, 'newer'):\n log.info(\"%s: %s\" % (os.path.basename(swig) \\\n + (is_cpp and '++' or ''), source))\n self.spawn(swig_cmd + self.swigflags \\\n + [\"-o\", target, '-outdir', py_target_dir, source])\n else:\n log.debug(\" skipping '%s' swig interface (up-to-date)\" \\\n % (source))\n\n return new_sources + py_files\n\ndef appendpath(prefix,path):\n if os.path.isabs(path):\n absprefix = os.path.abspath(prefix)\n d = os.path.commonprefix([absprefix,path])\n subpath = path[len(d):]\n assert not os.path.isabs(subpath),`subpath`\n return os.path.normpath(os.path.join(prefix,subpath))\n return os.path.normpath(os.path.join(prefix, path))\n\n#### SWIG related 
auxiliary functions ####\n_swig_module_name_match = re.compile(r'\\s*%module\\s*(?P[\\w_]+)',\n re.I).match\n_has_c_header = re.compile(r'-[*]-\\s*c\\s*-[*]-',re.I).search\n_has_cpp_header = re.compile(r'-[*]-\\s*c[+][+]\\s*-[*]-',re.I).search\n\ndef get_swig_target(source):\n f = open(source,'r')\n result = 'c'\n line = f.readline()\n if _has_cpp_header(line):\n result = 'c++'\n if _has_c_header(line):\n result = 'c'\n f.close()\n return result\n\ndef get_swig_modulename(source):\n f = open(source,'r')\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _swig_module_name_match(line)\n if m:\n name = m.group('name')\n break\n f.close()\n return name\n\ndef _find_swig_target(target_dir,name):\n for ext in ['.cpp','.c']:\n target = os.path.join(target_dir,'%s_wrap%s' % (name, ext))\n if os.path.isfile(target):\n break\n return target\n\n#### F2PY related auxiliary functions ####\n\n_f2py_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]+)',\n re.I).match\n_f2py_user_module_name_match = re.compile(r'\\s*python\\s*module\\s*(?P[\\w_]*?'\\\n '__user__[\\w_]*)',re.I).match\n\ndef get_f2py_modulename(source):\n name = None\n f = open(source)\n f_readlines = getattr(f,'xreadlines',f.readlines)\n for line in f_readlines():\n m = _f2py_module_name_match(line)\n if m:\n if _f2py_user_module_name_match(line): # skip *__user__* names\n continue\n name = m.group('name')\n break\n f.close()\n return name\n\n##########################################\n", "methods": [ { "name": "_get_constructor_argnames", "long_name": "_get_constructor_argnames( obj )", "filename": "build_src.py", "nloc": 2, "complexity": 1, "token_count": 19, "parameters": [ "obj" ], "start_line": 53, "end_line": 54, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 0 }, { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_src.py", "nloc": 14, "complexity": 1, "token_count": 66, 
"parameters": [ "self" ], "start_line": 74, "end_line": 87, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_src.py", "nloc": 28, "complexity": 8, "token_count": 201, "parameters": [ "self" ], "start_line": 89, "end_line": 120, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_src.py", "nloc": 9, "complexity": 4, "token_count": 45, "parameters": [ "self" ], "start_line": 122, "end_line": 130, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "apply_backend_info", "long_name": "apply_backend_info( self , backend )", "filename": "build_src.py", "nloc": 16, "complexity": 4, "token_count": 101, "parameters": [ "self", "backend" ], "start_line": 132, "end_line": 147, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 1 }, { "name": "backend_split.func", "long_name": "backend_split.func( extension , src_dir , name = name , fullname = fullname , backends = backends )", "filename": "build_src.py", "nloc": 12, "complexity": 2, "token_count": 86, "parameters": [ "extension", "src_dir", "name", "fullname", "backends" ], "start_line": 159, "end_line": 170, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 3 }, { "name": "backend_split.func_init", "long_name": "backend_split.func_init( extension , src_dir )", "filename": "build_src.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "extension", "src_dir" ], "start_line": 171, "end_line": 176, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 3 }, { "name": "backend_split", "long_name": "backend_split( self )", "filename": "build_src.py", "nloc": 25, "complexity": 5, "token_count": 229, "parameters": [ "self" ], "start_line": 149, 
"end_line": 191, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 43, "top_nesting_level": 1 }, { "name": "split_extension", "long_name": "split_extension( self , ext , backend )", "filename": "build_src.py", "nloc": 17, "complexity": 4, "token_count": 134, "parameters": [ "self", "ext", "backend" ], "start_line": 193, "end_line": 209, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "build_sources", "long_name": "build_sources( self )", "filename": "build_src.py", "nloc": 7, "complexity": 3, "token_count": 41, "parameters": [ "self" ], "start_line": 211, "end_line": 220, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "build_library_sources", "long_name": "build_library_sources( self , lib_name , build_info )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 59, "parameters": [ "self", "lib_name", "build_info" ], "start_line": 222, "end_line": 233, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "build_extension_sources", "long_name": "build_extension_sources( self , ext )", "filename": "build_src.py", "nloc": 23, "complexity": 4, "token_count": 207, "parameters": [ "self", "ext" ], "start_line": 235, "end_line": 268, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 34, "top_nesting_level": 1 }, { "name": "generate_sources", "long_name": "generate_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 29, "complexity": 9, "token_count": 193, "parameters": [ "self", "sources", "extension" ], "start_line": 270, "end_line": 298, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "filter_py_files", "long_name": "filter_py_files( self , sources )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "self", "sources" ], "start_line": 300, "end_line": 309, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "template_sources", "long_name": "template_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 22, "complexity": 6, "token_count": 170, "parameters": [ "self", "sources", "extension" ], "start_line": 311, "end_line": 332, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "f2py_sources", "long_name": "f2py_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 110, "complexity": 24, "token_count": 874, "parameters": [ "self", "sources", "extension" ], "start_line": 334, "end_line": 453, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 120, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 80, "complexity": 17, "token_count": 539, "parameters": [ "self", "sources", "extension" ], "start_line": 455, "end_line": 542, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 88, "top_nesting_level": 1 }, { "name": "appendpath", "long_name": "appendpath( prefix , path )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 101, "parameters": [ "prefix", "path" ], "start_line": 544, "end_line": 551, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "get_swig_target", "long_name": "get_swig_target( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 48, "parameters": [ "source" ], "start_line": 559, "end_line": 568, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "get_swig_modulename", "long_name": "get_swig_modulename( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "source" ], "start_line": 570, "end_line": 579, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, 
"top_nesting_level": 0 }, { "name": "_find_swig_target", "long_name": "_find_swig_target( target_dir , name )", "filename": "build_src.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "target_dir", "name" ], "start_line": 581, "end_line": 586, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "get_f2py_modulename", "long_name": "get_f2py_modulename( source )", "filename": "build_src.py", "nloc": 13, "complexity": 4, "token_count": 65, "parameters": [ "source" ], "start_line": 595, "end_line": 607, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 0 } ], "methods_before": [ { "name": "initialize_options", "long_name": "initialize_options( self )", "filename": "build_src.py", "nloc": 14, "complexity": 1, "token_count": 66, "parameters": [ "self" ], "start_line": 70, "end_line": 83, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 1 }, { "name": "finalize_options", "long_name": "finalize_options( self )", "filename": "build_src.py", "nloc": 28, "complexity": 8, "token_count": 201, "parameters": [ "self" ], "start_line": 85, "end_line": 116, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 32, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_src.py", "nloc": 7, "complexity": 4, "token_count": 37, "parameters": [ "self" ], "start_line": 118, "end_line": 124, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "backend_split.func", "long_name": "backend_split.func( extension , src_dir , name = name , fullname = fullname , backends = backends )", "filename": "build_src.py", "nloc": 12, "complexity": 2, "token_count": 86, "parameters": [ "extension", "src_dir", "name", "fullname", "backends" ], "start_line": 133, "end_line": 144, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 3 }, { "name": 
"backend_split.func_init", "long_name": "backend_split.func_init( extension , src_dir )", "filename": "build_src.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "extension", "src_dir" ], "start_line": 145, "end_line": 150, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 3 }, { "name": "backend_split", "long_name": "backend_split( self )", "filename": "build_src.py", "nloc": 21, "complexity": 4, "token_count": 210, "parameters": [ "self" ], "start_line": 126, "end_line": 164, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 39, "top_nesting_level": 1 }, { "name": "split_extension", "long_name": "split_extension( self , ext , backend )", "filename": "build_src.py", "nloc": 26, "complexity": 1, "token_count": 184, "parameters": [ "self", "ext", "backend" ], "start_line": 166, "end_line": 191, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "build_sources", "long_name": "build_sources( self )", "filename": "build_src.py", "nloc": 7, "complexity": 3, "token_count": 41, "parameters": [ "self" ], "start_line": 193, "end_line": 202, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "build_library_sources", "long_name": "build_library_sources( self , lib_name , build_info )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 59, "parameters": [ "self", "lib_name", "build_info" ], "start_line": 204, "end_line": 215, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 1 }, { "name": "build_extension_sources", "long_name": "build_extension_sources( self , ext )", "filename": "build_src.py", "nloc": 23, "complexity": 4, "token_count": 207, "parameters": [ "self", "ext" ], "start_line": 217, "end_line": 250, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 34, "top_nesting_level": 1 }, { "name": "generate_sources", "long_name": "generate_sources( self , 
sources , extension )", "filename": "build_src.py", "nloc": 29, "complexity": 9, "token_count": 193, "parameters": [ "self", "sources", "extension" ], "start_line": 252, "end_line": 280, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "filter_py_files", "long_name": "filter_py_files( self , sources )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "self", "sources" ], "start_line": 282, "end_line": 291, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "template_sources", "long_name": "template_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 22, "complexity": 6, "token_count": 170, "parameters": [ "self", "sources", "extension" ], "start_line": 293, "end_line": 314, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "f2py_sources", "long_name": "f2py_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 110, "complexity": 24, "token_count": 874, "parameters": [ "self", "sources", "extension" ], "start_line": 316, "end_line": 435, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 120, "top_nesting_level": 1 }, { "name": "swig_sources", "long_name": "swig_sources( self , sources , extension )", "filename": "build_src.py", "nloc": 80, "complexity": 17, "token_count": 539, "parameters": [ "self", "sources", "extension" ], "start_line": 437, "end_line": 524, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 88, "top_nesting_level": 1 }, { "name": "appendpath", "long_name": "appendpath( prefix , path )", "filename": "build_src.py", "nloc": 8, "complexity": 2, "token_count": 101, "parameters": [ "prefix", "path" ], "start_line": 526, "end_line": 533, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "get_swig_target", "long_name": "get_swig_target( source )", 
"filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 48, "parameters": [ "source" ], "start_line": 541, "end_line": 550, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "get_swig_modulename", "long_name": "get_swig_modulename( source )", "filename": "build_src.py", "nloc": 10, "complexity": 3, "token_count": 57, "parameters": [ "source" ], "start_line": 552, "end_line": 561, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "_find_swig_target", "long_name": "_find_swig_target( target_dir , name )", "filename": "build_src.py", "nloc": 6, "complexity": 3, "token_count": 47, "parameters": [ "target_dir", "name" ], "start_line": 563, "end_line": 568, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "get_f2py_modulename", "long_name": "get_f2py_modulename( source )", "filename": "build_src.py", "nloc": 13, "complexity": 4, "token_count": 65, "parameters": [ "source" ], "start_line": 577, "end_line": 589, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "apply_backend_info", "long_name": "apply_backend_info( self , backend )", "filename": "build_src.py", "nloc": 16, "complexity": 4, "token_count": 101, "parameters": [ "self", "backend" ], "start_line": 132, "end_line": 147, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 1 }, { "name": "backend_split", "long_name": "backend_split( self )", "filename": "build_src.py", "nloc": 25, "complexity": 5, "token_count": 229, "parameters": [ "self" ], "start_line": 149, "end_line": 191, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 43, "top_nesting_level": 1 }, { "name": "split_extension", "long_name": "split_extension( self , ext , backend )", "filename": "build_src.py", "nloc": 17, "complexity": 4, "token_count": 134, "parameters": [ "self", 
"ext", "backend" ], "start_line": 193, "end_line": 209, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 1 }, { "name": "run", "long_name": "run( self )", "filename": "build_src.py", "nloc": 9, "complexity": 4, "token_count": 45, "parameters": [ "self" ], "start_line": 122, "end_line": 130, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "_get_constructor_argnames", "long_name": "_get_constructor_argnames( obj )", "filename": "build_src.py", "nloc": 2, "complexity": 1, "token_count": 19, "parameters": [ "obj" ], "start_line": 53, "end_line": 54, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 0 } ], "nloc": 531, "complexity": 114, "token_count": 3618, "diff_parsed": { "added": [ "import copy", "from scipy_distutils.system_info import get_info, dict_append", "def _get_constructor_argnames(obj):", " return obj.__init__.im_func.func_code.co_varnames[1:]", " else:", " self.apply_backend_info('numeric')", " def apply_backend_info(self,backend):", " log.info('applying backend (%s) info to extensions'\\", " % (backend))", " backend_info = get_info(backend,notfound_action=1)", " if not backend_info:", " return", " extensions = []", " for ext in self.extensions:", " ext_args = {}", " for a in _get_constructor_argnames(ext):", " ext_args[a] = copy.copy(getattr(ext,a))", " dict_append(ext_args,**backend_info)", " new_ext = Extension(**ext_args)", " extensions.append(new_ext)", " self.extensions[:] = extensions", " return", "", " backends = self.backends.split(',')", " if len(backends)==1:", " self.apply_backend_info(backends[0])", " return", " return", " ext_args = {}", " for a in _get_constructor_argnames(ext):", " if a=='name':", " ext_args[a] = dot_join(package,'_%s.%s' % (backend,name))", " else:", " ext_args[a] = copy.copy(getattr(ext,a))", " backend_info = get_info(backend,notfound_action=1)", " if backend_info:", " 
dict_append(ext_args,**backend_info)", " new_ext = Extension(**ext_args)" ], "deleted": [ " backends = self.backends.split(',')", " macros = []", " macros.append((backend.upper(),None))", " new_ext = Extension(name = dot_join(package,'_%s.%s' % (backend,name)),", " sources = ext.sources,", " include_dirs = ext.include_dirs,", " define_macros = ext.define_macros + macros,", " undef_macros = ext.undef_macros,", " library_dirs = ext.library_dirs,", " libraries = ext.libraries,", " runtime_library_dirs = ext.runtime_library_dirs,", " extra_objects = ext.extra_objects,", " extra_compile_args = ext.extra_compile_args,", " extra_link_args = ext.extra_link_args,", " export_symbols = ext.export_symbols,", " depends = ext.depends,", " language = ext.language,", " f2py_options = ext.f2py_options,", " module_dirs = ext.module_dirs", " )" ] } }, { "old_path": "scipy_distutils/system_info.py", "new_path": "scipy_distutils/system_info.py", "filename": "system_info.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -166,6 +166,7 @@ def get_info(name,notfound_action=0):\n 'lapack_src':lapack_src_info,\n 'blas_src':blas_src_info,\n 'numpy':numpy_info,\n+ 'numeric':numpy_info, # alias to numpy, for build_ext --backends support\n 'numarray':numarray_info,\n 'lapack_opt':lapack_opt_info,\n 'blas_opt':blas_opt_info,\n@@ -1187,7 +1188,8 @@ def calc_info(self):\n return\n info = {}\n macros = [(self.modulename.upper()+'_VERSION',\n- '\"\\\\\"%s\\\\\"\"' % (module.__version__))]\n+ '\"\\\\\"%s\\\\\"\"' % (module.__version__)),\n+ (self.modulename.upper(),None)]\n ## try:\n ## macros.append(\n ## (self.modulename.upper()+'_VERSION_HEX',\n", "added_lines": 3, "deleted_lines": 1, "source_code": "#!/usr/bin/env python\n\"\"\"\nThis file defines a set of system_info classes for getting\ninformation about various resources (libraries, library directories,\ninclude directories, etc.) in the system. 
Currently, the following\nclasses are available:\n\n atlas_info\n atlas_threads_info\n atlas_blas_info\n atlas_blas_threads_info\n lapack_atlas_info\n blas_info\n lapack_info\n blas_opt_info # usage recommended\n lapack_opt_info # usage recommended\n fftw_info,dfftw_info,sfftw_info\n fftw_threads_info,dfftw_threads_info,sfftw_threads_info\n djbfft_info\n x11_info\n lapack_src_info\n blas_src_info\n numpy_info\n numarray_info\n boost_python_info\n agg2_info\n wx_info\n gdk_pixbuf_xlib_2_info\n gdk_pixbuf_2_info\n gdk_x11_2_info\n gtkp_x11_2_info\n gtkp_2_info\n xft_info\n freetype2_info\n\nUsage:\n info_dict = get_info()\n where is a string 'atlas','x11','fftw','lapack','blas',\n 'lapack_src', 'blas_src', etc. For a complete list of allowed names,\n see the definition of get_info() function below.\n\n Returned info_dict is a dictionary which is compatible with\n distutils.setup keyword arguments. If info_dict == {}, then the\n asked resource is not available (system_info could not find it).\n\n Several *_info classes specify an environment variable to specify\n the locations of software. When setting the corresponding environment\n variable to 'None' then the software will be ignored, even when it\n is available in system.\n\nGlobal parameters:\n system_info.search_static_first - search static libraries (.a)\n in precedence to shared ones (.so, .sl) if enabled.\n system_info.verbosity - output the results to stdout if enabled.\n\nThe file 'site.cfg' in the same directory as this module is read\nfor configuration options. The format is that used by ConfigParser (i.e.,\nWindows .INI style). The section DEFAULT has options that are the default\nfor each section. The available sections are fftw, atlas, and x11. Appropiate\ndefaults are used if nothing is specified.\n\nThe order of finding the locations of resources is the following:\n 1. environment variable\n 2. section in site.cfg\n 3. 
DEFAULT section in site.cfg\nOnly the first complete match is returned.\n\nExample:\n----------\n[DEFAULT]\nlibrary_dirs = /usr/lib:/usr/local/lib:/opt/lib\ninclude_dirs = /usr/include:/usr/local/include:/opt/include\nsrc_dirs = /usr/local/src:/opt/src\n# search static libraries (.a) in preference to shared ones (.so)\nsearch_static_first = 0\n\n[fftw]\nfftw_libs = rfftw, fftw\nfftw_opt_libs = rfftw_threaded, fftw_threaded\n# if the above aren't found, look for {s,d}fftw_libs and {s,d}fftw_opt_libs\n\n[atlas]\nlibrary_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas\n# for overriding the names of the atlas libraries\natlas_libs = lapack, f77blas, cblas, atlas\n\n[x11]\nlibrary_dirs = /usr/X11R6/lib\ninclude_dirs = /usr/X11R6/include\n----------\n\nAuthors:\n Pearu Peterson , February 2002\n David M. Cooke , April 2002\n\nCopyright 2002 Pearu Peterson all rights reserved,\nPearu Peterson \nPermission to use, modify, and distribute this software is given under the \nterms of the SciPy (BSD style) license. See LICENSE.txt that came with\nthis distribution for specifics.\n\nNO WARRANTY IS EXPRESSED OR IMPLIED. 
USE AT YOUR OWN RISK.\n\"\"\"\n\n__revision__ = '$Id$'\nimport sys,os,re,types\nimport warnings\nfrom distutils.errors import DistutilsError\nfrom glob import glob\nimport ConfigParser\nfrom exec_command import find_executable, exec_command\n\nfrom distutils.sysconfig import get_config_vars\n\nif sys.platform == 'win32':\n default_lib_dirs = ['C:\\\\'] # probably not very helpful...\n default_include_dirs = []\n default_src_dirs = ['.']\n default_x11_lib_dirs = []\n default_x11_include_dirs = []\nelse:\n default_lib_dirs = ['/usr/local/lib', '/opt/lib', '/usr/lib',\n '/sw/lib']\n default_include_dirs = ['/usr/local/include',\n '/opt/include', '/usr/include',\n '/sw/include']\n default_src_dirs = ['.','/usr/local/src', '/opt/src','/sw/src']\n default_x11_lib_dirs = ['/usr/X11R6/lib','/usr/X11/lib','/usr/lib']\n default_x11_include_dirs = ['/usr/X11R6/include','/usr/X11/include',\n '/usr/include']\n\nif os.path.join(sys.prefix, 'lib') not in default_lib_dirs:\n default_lib_dirs.insert(0,os.path.join(sys.prefix, 'lib'))\n default_include_dirs.append(os.path.join(sys.prefix, 'include'))\n default_src_dirs.append(os.path.join(sys.prefix, 'src'))\n\ndefault_lib_dirs = filter(os.path.isdir, default_lib_dirs)\ndefault_include_dirs = filter(os.path.isdir, default_include_dirs)\ndefault_src_dirs = filter(os.path.isdir, default_src_dirs)\n\nso_ext = get_config_vars('SO')[0] or ''\n\ndef get_info(name,notfound_action=0):\n \"\"\"\n notfound_action:\n 0 - do nothing\n 1 - display warning message\n 2 - raise error\n \"\"\"\n cl = {'atlas':atlas_info, # use lapack_opt or blas_opt instead\n 'atlas_threads':atlas_threads_info, # ditto\n 'atlas_blas':atlas_blas_info,\n 'atlas_blas_threads':atlas_blas_threads_info,\n 'lapack_atlas':lapack_atlas_info, # use lapack_opt instead\n 'lapack_atlas_threads':lapack_atlas_threads_info, # ditto\n 'x11':x11_info,\n 'fftw':fftw_info,\n 'dfftw':dfftw_info,\n 'sfftw':sfftw_info,\n 'fftw_threads':fftw_threads_info,\n 
'dfftw_threads':dfftw_threads_info,\n 'sfftw_threads':sfftw_threads_info,\n 'djbfft':djbfft_info,\n 'blas':blas_info, # use blas_opt instead\n 'lapack':lapack_info, # use lapack_opt instead\n 'lapack_src':lapack_src_info,\n 'blas_src':blas_src_info,\n 'numpy':numpy_info,\n 'numeric':numpy_info, # alias to numpy, for build_ext --backends support\n 'numarray':numarray_info,\n 'lapack_opt':lapack_opt_info,\n 'blas_opt':blas_opt_info,\n 'boost_python':boost_python_info,\n 'agg2':agg2_info,\n 'wx':wx_info,\n 'gdk_pixbuf_xlib_2':gdk_pixbuf_xlib_2_info,\n 'gdk-pixbuf-xlib-2.0':gdk_pixbuf_xlib_2_info,\n 'gdk_pixbuf_2':gdk_pixbuf_2_info,\n 'gdk-pixbuf-2.0':gdk_pixbuf_2_info,\n 'gdk':gdk_info,\n 'gdk_2':gdk_2_info,\n 'gdk-2.0':gdk_2_info,\n 'gdk_x11_2':gdk_x11_2_info,\n 'gdk-x11-2.0':gdk_x11_2_info,\n 'gtkp_x11_2':gtkp_x11_2_info,\n 'gtk+-x11-2.0':gtkp_x11_2_info,\n 'gtkp_2':gtkp_2_info,\n 'gtk+-2.0':gtkp_2_info,\n 'xft':xft_info,\n 'freetype2':freetype2_info,\n }.get(name.lower(),system_info)\n return cl().get_info(notfound_action)\n\nclass NotFoundError(DistutilsError):\n \"\"\"Some third-party program or library is not found.\"\"\"\n\nclass AtlasNotFoundError(NotFoundError):\n \"\"\"\n Atlas (http://math-atlas.sourceforge.net/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [atlas]) or by setting\n the ATLAS environment variable.\"\"\"\n\nclass LapackNotFoundError(NotFoundError):\n \"\"\"\n Lapack (http://www.netlib.org/lapack/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [lapack]) or by setting\n the LAPACK environment variable.\"\"\"\n\nclass LapackSrcNotFoundError(LapackNotFoundError):\n \"\"\"\n Lapack (http://www.netlib.org/lapack/) sources not found.\n Directories to search for the sources can be specified in the\n scipy_distutils/site.cfg file (section [lapack_src]) or by setting\n the LAPACK_SRC 
environment variable.\"\"\"\n\nclass BlasNotFoundError(NotFoundError):\n \"\"\"\n Blas (http://www.netlib.org/blas/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [blas]) or by setting\n the BLAS environment variable.\"\"\"\n\nclass BlasSrcNotFoundError(BlasNotFoundError):\n \"\"\"\n Blas (http://www.netlib.org/blas/) sources not found.\n Directories to search for the sources can be specified in the\n scipy_distutils/site.cfg file (section [blas_src]) or by setting\n the BLAS_SRC environment variable.\"\"\"\n\nclass FFTWNotFoundError(NotFoundError):\n \"\"\"\n FFTW (http://www.fftw.org/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [fftw]) or by setting\n the FFTW environment variable.\"\"\"\n\nclass DJBFFTNotFoundError(NotFoundError):\n \"\"\"\n DJBFFT (http://cr.yp.to/djbfft.html) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [djbfft]) or by setting\n the DJBFFT environment variable.\"\"\"\n\nclass F2pyNotFoundError(NotFoundError):\n \"\"\"\n f2py2e (http://cens.ioc.ee/projects/f2py2e/) module not found.\n Get it from above location, install it, and retry setup.py.\"\"\"\n\nclass NumericNotFoundError(NotFoundError):\n \"\"\"\n Numeric (http://www.numpy.org/) module not found.\n Get it from above location, install it, and retry setup.py.\"\"\"\n\nclass X11NotFoundError(NotFoundError):\n \"\"\"X11 libraries not found.\"\"\"\n\nclass system_info:\n\n \"\"\" get_info() is the only public method. 
Don't use others.\n \"\"\"\n section = 'DEFAULT'\n dir_env_var = None\n search_static_first = 0 # XXX: disabled by default, may disappear in\n # future unless it is proved to be useful.\n verbosity = 1\n saved_results = {}\n\n notfounderror = NotFoundError\n\n def __init__ (self,\n default_lib_dirs=default_lib_dirs,\n default_include_dirs=default_include_dirs,\n verbosity = 1,\n ):\n self.__class__.info = {}\n self.local_prefixes = []\n defaults = {}\n defaults['libraries'] = ''\n defaults['library_dirs'] = os.pathsep.join(default_lib_dirs)\n defaults['include_dirs'] = os.pathsep.join(default_include_dirs)\n defaults['src_dirs'] = os.pathsep.join(default_src_dirs)\n defaults['search_static_first'] = str(self.search_static_first)\n self.cp = ConfigParser.ConfigParser(defaults)\n try:\n f = __file__\n except NameError,msg:\n f = sys.argv[0]\n cf = os.path.join(os.path.split(os.path.abspath(f))[0],\n 'site.cfg')\n self.cp.read([cf])\n if not self.cp.has_section(self.section):\n self.cp.add_section(self.section)\n self.search_static_first = self.cp.getboolean(self.section,\n 'search_static_first')\n assert isinstance(self.search_static_first, type(0))\n\n def calc_libraries_info(self):\n libs = self.get_libraries()\n dirs = self.get_lib_dirs()\n info = {}\n for lib in libs:\n i = None\n for d in dirs:\n i = self.check_libs(d,[lib]) \n if i is not None:\n break\n if i is not None:\n dict_append(info,**i)\n else:\n print 'Library %s was not found. 
Ignoring' % (lib)\n return info\n\n def set_info(self,**info):\n if info: \n lib_info = self.calc_libraries_info()\n dict_append(info,**lib_info)\n self.saved_results[self.__class__.__name__] = info\n\n def has_info(self):\n return self.saved_results.has_key(self.__class__.__name__)\n\n def get_info(self,notfound_action=0):\n \"\"\" Return a dictonary with items that are compatible\n with scipy_distutils.setup keyword arguments.\n \"\"\"\n flag = 0\n if not self.has_info():\n flag = 1\n if self.verbosity>0:\n print self.__class__.__name__ + ':'\n if hasattr(self, 'calc_info'):\n self.calc_info()\n if notfound_action:\n if not self.has_info():\n if notfound_action==1:\n warnings.warn(self.notfounderror.__doc__)\n elif notfound_action==2:\n raise self.notfounderror,self.notfounderror.__doc__\n else:\n raise ValueError,`notfound_action`\n\n if self.verbosity>0:\n if not self.has_info():\n print ' NOT AVAILABLE'\n self.set_info()\n else:\n print ' FOUND:'\n \n res = self.saved_results.get(self.__class__.__name__)\n if self.verbosity>0 and flag:\n for k,v in res.items():\n v = str(v)\n if k=='sources' and len(v)>200: v = v[:60]+' ...\\n... 
'+v[-60:]\n print ' %s = %s'%(k,v)\n print\n \n return res\n\n def get_paths(self, section, key):\n dirs = self.cp.get(section, key).split(os.pathsep)\n env_var = self.dir_env_var\n if env_var:\n if type(env_var) is type([]):\n e0 = env_var[-1]\n for e in env_var:\n if os.environ.has_key(e):\n e0 = e\n break\n if not env_var[0]==e0:\n print 'Setting %s=%s' % (env_var[0],e0)\n env_var = e0\n if env_var and os.environ.has_key(env_var):\n d = os.environ[env_var]\n if d=='None':\n print 'Disabled',self.__class__.__name__,'(%s is None)' \\\n % (self.dir_env_var)\n return []\n if os.path.isfile(d):\n dirs = [os.path.dirname(d)] + dirs\n l = getattr(self,'_lib_names',[])\n if len(l)==1:\n b = os.path.basename(d)\n b = os.path.splitext(b)[0]\n if b[:3]=='lib':\n print 'Replacing _lib_names[0]==%r with %r' \\\n % (self._lib_names[0], b[3:])\n self._lib_names[0] = b[3:]\n else:\n ds = d.split(os.pathsep)\n ds2 = []\n for d in ds:\n if os.path.isdir(d):\n ds2.append(d)\n for dd in ['include','lib']:\n d1 = os.path.join(d,dd)\n if os.path.isdir(d1):\n ds2.append(d1)\n dirs = ds2 + dirs\n default_dirs = self.cp.get('DEFAULT', key).split(os.pathsep)\n dirs.extend(default_dirs)\n ret = []\n [ret.append(d) for d in dirs if os.path.isdir(d) and d not in ret]\n if self.verbosity>1:\n print '(',key,'=',':'.join(ret),')'\n return ret\n\n def get_lib_dirs(self, key='library_dirs'):\n return self.get_paths(self.section, key)\n\n def get_include_dirs(self, key='include_dirs'):\n return self.get_paths(self.section, key)\n\n def get_src_dirs(self, key='src_dirs'):\n return self.get_paths(self.section, key)\n\n def get_libs(self, key, default):\n try:\n libs = self.cp.get(self.section, key)\n except ConfigParser.NoOptionError:\n if not default:\n return []\n if type(default) is type(''):\n return [default]\n return default\n return [b for b in [a.strip() for a in libs.split(',')] if b]\n\n def get_libraries(self, key='libraries'):\n return self.get_libs(key,'')\n\n def 
check_libs(self,lib_dir,libs,opt_libs =[]):\n \"\"\" If static or shared libraries are available then return\n their info dictionary. \"\"\"\n if self.search_static_first:\n exts = ['.a',so_ext]\n else:\n exts = [so_ext,'.a']\n if sys.platform=='cygwin':\n exts.append('.dll.a')\n for ext in exts:\n info = self._check_libs(lib_dir,libs,opt_libs,ext)\n if info is not None: return info\n return\n\n def _lib_list(self, lib_dir, libs, ext):\n assert type(lib_dir) is type('')\n liblist = []\n for l in libs:\n p = self.combine_paths(lib_dir, 'lib'+l+ext)\n if p:\n assert len(p)==1\n liblist.append(p[0])\n return liblist\n\n def _extract_lib_names(self,libs):\n return [os.path.splitext(os.path.basename(p))[0][3:] \\\n for p in libs]\n\n def _check_libs(self,lib_dir,libs, opt_libs, ext):\n found_libs = self._lib_list(lib_dir, libs, ext)\n if len(found_libs) == len(libs):\n found_libs = self._extract_lib_names(found_libs)\n info = {'libraries' : found_libs, 'library_dirs' : [lib_dir]}\n opt_found_libs = self._lib_list(lib_dir, opt_libs, ext)\n if len(opt_found_libs) == len(opt_libs):\n opt_found_libs = self._extract_lib_names(opt_found_libs)\n info['libraries'].extend(opt_found_libs)\n return info\n\n def combine_paths(self,*args):\n return combine_paths(*args,**{'verbosity':self.verbosity})\n\nclass fftw_info(system_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['rfftw', 'fftw']\n includes = ['fftw.h','rfftw.h']\n macros = [('SCIPY_FFTW_H',None)]\n notfounderror = FFTWNotFoundError\n\n def __init__(self):\n system_info.__init__(self)\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n incl_dirs = self.get_include_dirs()\n incl_dir = None\n libs = self.get_libs(self.section+'_libs', self.libs)\n info = None\n for d in lib_dirs:\n r = self.check_libs(d,libs)\n if r is not None:\n info = r\n break\n if info is not None:\n flag = 0\n for d in incl_dirs:\n if len(self.combine_paths(d,self.includes))==2:\n dict_append(info,include_dirs=[d])\n flag = 1\n 
incl_dirs = [d]\n incl_dir = d\n break\n if flag:\n dict_append(info,define_macros=self.macros)\n else:\n info = None\n if info is not None:\n self.set_info(**info)\n\nclass dfftw_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['drfftw','dfftw']\n includes = ['dfftw.h','drfftw.h']\n macros = [('SCIPY_DFFTW_H',None)]\n\nclass sfftw_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['srfftw','sfftw']\n includes = ['sfftw.h','srfftw.h']\n macros = [('SCIPY_SFFTW_H',None)]\n\nclass fftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['rfftw_threads','fftw_threads']\n includes = ['fftw_threads.h','rfftw_threads.h']\n macros = [('SCIPY_FFTW_THREADS_H',None)]\n\nclass dfftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['drfftw_threads','dfftw_threads']\n includes = ['dfftw_threads.h','drfftw_threads.h']\n macros = [('SCIPY_DFFTW_THREADS_H',None)]\n\nclass sfftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['srfftw_threads','sfftw_threads']\n includes = ['sfftw_threads.h','srfftw_threads.h']\n macros = [('SCIPY_SFFTW_THREADS_H',None)]\n\nclass djbfft_info(system_info):\n section = 'djbfft'\n dir_env_var = 'DJBFFT'\n notfounderror = DJBFFTNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend(self.combine_paths(d,['djbfft'])+[d])\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n incl_dirs = self.get_include_dirs()\n info = None\n for d in lib_dirs:\n p = self.combine_paths (d,['djbfft.a'])\n if p:\n info = {'extra_objects':p}\n break\n p = self.combine_paths (d,['libdjbfft.a'])\n if p:\n info = {'libraries':['djbfft'],'library_dirs':[d]}\n break\n if info is None:\n return\n for d in incl_dirs:\n if len(self.combine_paths(d,['fftc8.h','fftfreq.h']))==2:\n 
dict_append(info,include_dirs=[d],\n define_macros=[('SCIPY_DJBFFT_H',None)])\n self.set_info(**info)\n return\n return\n\nclass atlas_info(system_info):\n section = 'atlas'\n dir_env_var = 'ATLAS'\n _lib_names = ['f77blas','cblas']\n if sys.platform[:7]=='freebsd':\n _lib_atlas = ['atlas_r']\n _lib_lapack = ['alapack_r']\n else:\n _lib_atlas = ['atlas']\n _lib_lapack = ['lapack']\n\n notfounderror = AtlasNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend(self.combine_paths(d,['atlas*','ATLAS*',\n 'sse','3dnow','sse2'])+[d])\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n info = {}\n atlas_libs = self.get_libs('atlas_libs',\n self._lib_names + self._lib_atlas)\n lapack_libs = self.get_libs('lapack_libs',self._lib_lapack)\n atlas = None\n lapack = None\n atlas_1 = None\n for d in lib_dirs:\n atlas = self.check_libs(d,atlas_libs,[])\n lapack_atlas = self.check_libs(d,['lapack_atlas'],[])\n if atlas is not None:\n lib_dirs2 = self.combine_paths(d,['atlas*','ATLAS*'])+[d]\n for d2 in lib_dirs2:\n lapack = self.check_libs(d2,lapack_libs,[])\n if lapack is not None:\n break\n else:\n lapack = None\n if lapack is not None:\n break\n if atlas:\n atlas_1 = atlas\n print self.__class__\n if atlas is None:\n atlas = atlas_1\n if atlas is None:\n return\n include_dirs = self.get_include_dirs()\n h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]\n if h:\n h = os.path.dirname(h)\n dict_append(info,include_dirs=[h])\n info['language'] = 'c'\n if lapack is not None:\n dict_append(info,**lapack)\n dict_append(info,**atlas)\n elif 'lapack_atlas' in atlas['libraries']:\n dict_append(info,**atlas)\n dict_append(info,define_macros=[('ATLAS_WITH_LAPACK_ATLAS',None)])\n self.set_info(**info)\n return\n else:\n dict_append(info,**atlas)\n dict_append(info,define_macros=[('ATLAS_WITHOUT_LAPACK',None)])\n 
message = \"\"\"\n*********************************************************************\n Could not find lapack library within the ATLAS installation.\n*********************************************************************\n\"\"\"\n warnings.warn(message)\n self.set_info(**info)\n return\n \n # Check if lapack library is complete, only warn if it is not.\n lapack_dir = lapack['library_dirs'][0]\n lapack_name = lapack['libraries'][0]\n lapack_lib = None\n for e in ['.a',so_ext]:\n fn = os.path.join(lapack_dir,'lib'+lapack_name+e)\n if os.path.exists(fn):\n lapack_lib = fn\n break\n if lapack_lib is not None:\n sz = os.stat(lapack_lib)[6]\n if sz <= 4000*1024:\n message = \"\"\"\n*********************************************************************\n Lapack library (from ATLAS) is probably incomplete:\n size of %s is %sk (expected >4000k)\n\n Follow the instructions in the KNOWN PROBLEMS section of the file\n scipy/INSTALL.txt.\n*********************************************************************\n\"\"\" % (lapack_lib,sz/1024)\n warnings.warn(message)\n else:\n info['language'] = 'f77'\n\n self.set_info(**info)\n\nclass atlas_blas_info(atlas_info):\n _lib_names = ['f77blas','cblas']\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n info = {}\n atlas_libs = self.get_libs('atlas_libs',\n self._lib_names + self._lib_atlas)\n atlas = None\n for d in lib_dirs:\n atlas = self.check_libs(d,atlas_libs,[])\n if atlas is not None:\n break\n if atlas is None:\n return\n include_dirs = self.get_include_dirs()\n h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]\n if h:\n h = os.path.dirname(h)\n dict_append(info,include_dirs=[h])\n info['language'] = 'c'\n\n dict_append(info,**atlas)\n\n self.set_info(**info)\n return\n\n\nclass atlas_threads_info(atlas_info):\n dir_env_var = ['PTATLAS','ATLAS']\n _lib_names = ['ptf77blas','ptcblas']\n\nclass atlas_blas_threads_info(atlas_blas_info):\n dir_env_var = ['PTATLAS','ATLAS']\n _lib_names = 
['ptf77blas','ptcblas']\n\nclass lapack_atlas_info(atlas_info):\n _lib_names = ['lapack_atlas'] + atlas_info._lib_names\n\nclass lapack_atlas_threads_info(atlas_threads_info):\n _lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names\n\nclass lapack_info(system_info):\n section = 'lapack'\n dir_env_var = 'LAPACK'\n _lib_names = ['lapack']\n notfounderror = LapackNotFoundError\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n\n lapack_libs = self.get_libs('lapack_libs', self._lib_names)\n for d in lib_dirs:\n lapack = self.check_libs(d,lapack_libs,[])\n if lapack is not None:\n info = lapack \n break\n else:\n return\n info['language'] = 'f77'\n self.set_info(**info)\n\nclass lapack_src_info(system_info):\n section = 'lapack_src'\n dir_env_var = 'LAPACK_SRC'\n notfounderror = LapackSrcNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['LAPACK*/SRC','SRC']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'dgesv.f')):\n src_dir = d\n break\n if not src_dir:\n #XXX: Get sources from netlib. 
May be ask first.\n return\n # The following is extracted from LAPACK-3.0/SRC/Makefile\n allaux='''\n ilaenv ieeeck lsame lsamen xerbla\n ''' # *.f\n laux = '''\n bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1\n laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2\n lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre\n larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4\n lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1\n lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf\n stebz stedc steqr sterf\n ''' # [s|d]*.f\n lasrc = '''\n gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak\n gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv\n gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2\n geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd\n gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal\n gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd\n ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein\n hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0\n lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb\n lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp\n laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv\n lartv larz larzb larzt laswp lasyf latbs latdf latps latrd\n latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv\n pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2\n potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri\n pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs\n spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv\n sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2\n tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs\n trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs\n tzrqf tzrzf\n ''' # [s|c|d|z]*.f\n sd_lasrc = '''\n laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l\n org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr\n orm2l 
orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3\n ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx\n sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd\n stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd\n sygvx sytd2 sytrd\n ''' # [s|d]*.f\n cz_lasrc = '''\n bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev\n heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv\n hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd\n hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf\n hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7\n laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe\n laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv\n spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq\n ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2\n unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr\n ''' # [c|z]*.f\n #######\n sclaux = laux + ' econd ' # s*.f\n dzlaux = laux + ' secnd ' # d*.f\n slasrc = lasrc + sd_lasrc # s*.f\n dlasrc = lasrc + sd_lasrc # d*.f\n clasrc = lasrc + cz_lasrc + ' srot srscl ' # c*.f\n zlasrc = lasrc + cz_lasrc + ' drot drscl ' # z*.f\n oclasrc = ' icmax1 scsum1 ' # *.f\n ozlasrc = ' izmax1 dzsum1 ' # *.f\n sources = ['s%s.f'%f for f in (sclaux+slasrc).split()] \\\n + ['d%s.f'%f for f in (dzlaux+dlasrc).split()] \\\n + ['c%s.f'%f for f in (clasrc).split()] \\\n + ['z%s.f'%f for f in (zlasrc).split()] \\\n + ['%s.f'%f for f in (allaux+oclasrc+ozlasrc).split()]\n sources = [os.path.join(src_dir,f) for f in sources]\n #XXX: should we check here actual existence of source files?\n info = {'sources':sources,'language':'f77'}\n self.set_info(**info)\n\natlas_version_c_text = r'''\n/* This file is generated from scipy_distutils/system_info.py */\n#ifdef __CPLUSPLUS__\nextern \"C\" {\n#endif\n#include \"Python.h\"\nstatic PyMethodDef module_methods[] = { {NULL,NULL} };\nDL_EXPORT(void) initatlas_version(void) {\n void 
ATL_buildinfo(void);\n ATL_buildinfo();\n Py_InitModule(\"atlas_version\", module_methods);\n}\n#ifdef __CPLUSCPLUS__\n}\n#endif\n'''\n\ndef get_atlas_version(**config):\n from core import Extension, setup\n from misc_util import get_build_temp\n import log\n magic = hex(hash(`config`))\n def atlas_version_c(extension, build_dir,magic=magic):\n source = os.path.join(build_dir,'atlas_version_%s.c' % (magic))\n if os.path.isfile(source):\n from distutils.dep_util import newer\n if newer(source,__file__):\n return source\n f = open(source,'w')\n f.write(atlas_version_c_text)\n f.close()\n return source\n ext = Extension('atlas_version',\n sources=[atlas_version_c],\n **config)\n extra_args = ['--build-lib',get_build_temp()]\n for a in sys.argv:\n if re.match('[-][-]compiler[=]',a):\n extra_args.append(a)\n try:\n dist = setup(ext_modules=[ext],\n script_name = 'get_atlas_version',\n script_args = ['build_src','build_ext']+extra_args)\n except Exception,msg:\n print \"##### msg: %s\" % msg\n if not msg:\n msg = \"Unknown Exception\"\n log.warn(msg)\n return None\n\n from distutils.sysconfig import get_config_var\n so_ext = get_config_var('SO')\n build_ext = dist.get_command_obj('build_ext')\n target = os.path.join(build_ext.build_lib,'atlas_version'+so_ext)\n from exec_command import exec_command,get_pythonexe\n cmd = [get_pythonexe(),'-c',\n '\"import imp;imp.load_dynamic(\\\\\"atlas_version\\\\\",\\\\\"%s\\\\\")\"'\\\n % (os.path.basename(target))]\n s,o = exec_command(cmd,execute_in=os.path.dirname(target),use_tee=0)\n atlas_version = None\n if not s:\n m = re.search(r'ATLAS version (?P\\d+[.]\\d+[.]\\d+)',o)\n if m:\n atlas_version = m.group('version')\n if atlas_version is None:\n if re.search(r'undefined symbol: ATL_buildinfo',o,re.M):\n atlas_version = '3.2.1_pre3.3.6'\n else:\n print 'Command:',' '.join(cmd)\n print 'Status:',s\n print 'Output:',o\n return atlas_version\n\n\nclass lapack_opt_info(system_info):\n \n def calc_info(self):\n\n if 
sys.platform=='darwin' and not os.environ.get('ATLAS',None):\n args = []\n link_args = []\n if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):\n args.extend(['-faltivec','-framework','Accelerate'])\n link_args.extend(['-Wl,-framework','-Wl,Accelerate'])\n elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):\n args.extend(['-faltivec','-framework','vecLib'])\n link_args.extend(['-Wl,-framework','-Wl,vecLib'])\n if args:\n self.set_info(extra_compile_args=args,\n extra_link_args=link_args,\n define_macros=[('NO_ATLAS_INFO',3)])\n return\n\n atlas_info = get_info('atlas_threads')\n if not atlas_info:\n atlas_info = get_info('atlas')\n #atlas_info = {} ## uncomment for testing\n atlas_version = None\n need_lapack = 0\n need_blas = 0\n info = {}\n if atlas_info:\n version_info = atlas_info.copy()\n atlas_version = get_atlas_version(**version_info)\n if not atlas_info.has_key('define_macros'):\n atlas_info['define_macros'] = []\n if atlas_version is None:\n atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))\n else:\n atlas_info['define_macros'].append(('ATLAS_INFO',\n '\"\\\\\"%s\\\\\"\"' % atlas_version))\n\t\tif atlas_version=='3.2.1_pre3.3.6':\n\t\t atlas_info['define_macros'].append(('NO_ATLAS_INFO',4))\n l = atlas_info.get('define_macros',[])\n if ('ATLAS_WITH_LAPACK_ATLAS',None) in l \\\n or ('ATLAS_WITHOUT_LAPACK',None) in l:\n need_lapack = 1\n info = atlas_info\n else:\n warnings.warn(AtlasNotFoundError.__doc__)\n need_blas = 1\n need_lapack = 1\n dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])\n\n if need_lapack:\n lapack_info = get_info('lapack')\n #lapack_info = {} ## uncomment for testing\n if lapack_info:\n dict_append(info,**lapack_info)\n else:\n warnings.warn(LapackNotFoundError.__doc__)\n lapack_src_info = get_info('lapack_src')\n if not lapack_src_info:\n warnings.warn(LapackSrcNotFoundError.__doc__)\n return\n dict_append(info,libraries=[('flapack_src',lapack_src_info)])\n\n if need_blas:\n blas_info = 
get_info('blas')\n #blas_info = {} ## uncomment for testing\n if blas_info:\n dict_append(info,**blas_info)\n else:\n warnings.warn(BlasNotFoundError.__doc__)\n blas_src_info = get_info('blas_src')\n if not blas_src_info:\n warnings.warn(BlasSrcNotFoundError.__doc__)\n return\n dict_append(info,libraries=[('fblas_src',blas_src_info)])\n\n self.set_info(**info)\n return\n\n\nclass blas_opt_info(system_info):\n \n def calc_info(self):\n\n if sys.platform=='darwin' and not os.environ.get('ATLAS',None):\n args = []\n link_args = []\n if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):\n args.extend(['-faltivec','-framework','Accelerate'])\n link_args.extend(['-Wl,-framework','-Wl,Accelerate'])\n elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):\n args.extend(['-faltivec','-framework','vecLib'])\n link_args.extend(['-Wl,-framework','-Wl,vecLib'])\n if args:\n self.set_info(extra_compile_args=args,\n extra_link_args=link_args,\n define_macros=[('NO_ATLAS_INFO',3)])\n return\n\n atlas_info = get_info('atlas_blas_threads')\n if not atlas_info:\n atlas_info = get_info('atlas_blas')\n atlas_version = None\n need_blas = 0\n info = {}\n if atlas_info:\n version_info = atlas_info.copy()\n atlas_version = get_atlas_version(**version_info)\n if not atlas_info.has_key('define_macros'):\n atlas_info['define_macros'] = []\n if atlas_version is None:\n atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))\n else:\n atlas_info['define_macros'].append(('ATLAS_INFO',\n '\"\\\\\"%s\\\\\"\"' % atlas_version))\n info = atlas_info\n else:\n warnings.warn(AtlasNotFoundError.__doc__)\n need_blas = 1\n dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])\n\n if need_blas:\n blas_info = get_info('blas')\n if blas_info:\n dict_append(info,**blas_info)\n else:\n warnings.warn(BlasNotFoundError.__doc__)\n blas_src_info = get_info('blas_src')\n if not blas_src_info:\n warnings.warn(BlasSrcNotFoundError.__doc__)\n return\n 
dict_append(info,libraries=[('fblas_src',blas_src_info)])\n\n self.set_info(**info)\n return\n\n\nclass blas_info(system_info):\n section = 'blas'\n dir_env_var = 'BLAS'\n _lib_names = ['blas']\n notfounderror = BlasNotFoundError\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n\n blas_libs = self.get_libs('blas_libs', self._lib_names)\n for d in lib_dirs:\n blas = self.check_libs(d,blas_libs,[])\n if blas is not None:\n info = blas \n break\n else:\n return\n info['language'] = 'f77' # XXX: is it generally true?\n self.set_info(**info)\n\n\nclass blas_src_info(system_info):\n section = 'blas_src'\n dir_env_var = 'BLAS_SRC'\n notfounderror = BlasSrcNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['blas']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'daxpy.f')):\n src_dir = d\n break\n if not src_dir:\n #XXX: Get sources from netlib. 
May be ask first.\n return\n blas1 = '''\n caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot\n dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2\n srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg\n dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax\n snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap\n '''\n blas2 = '''\n cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv\n chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv\n dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv\n sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger\n stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc\n zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2\n ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv\n '''\n blas3 = '''\n cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k\n dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm\n ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm\n '''\n sources = [os.path.join(src_dir,f+'.f') \\\n for f in (blas1+blas2+blas3).split()]\n #XXX: should we check here actual existence of source files?\n info = {'sources':sources,'language':'f77'}\n self.set_info(**info)\n\nclass x11_info(system_info):\n section = 'x11'\n notfounderror = X11NotFoundError\n\n def __init__(self):\n system_info.__init__(self,\n default_lib_dirs=default_x11_lib_dirs,\n default_include_dirs=default_x11_include_dirs)\n\n def calc_info(self):\n if sys.platform in ['win32']:\n return\n lib_dirs = self.get_lib_dirs()\n include_dirs = self.get_include_dirs()\n x11_libs = self.get_libs('x11_libs', ['X11'])\n for lib_dir in lib_dirs:\n info = self.check_libs(lib_dir, x11_libs, [])\n if info is not None:\n break\n else:\n return\n inc_dir = None\n for d in include_dirs:\n if self.combine_paths(d, 'X11/X.h'):\n inc_dir = d\n break\n if inc_dir is not None:\n dict_append(info, include_dirs=[inc_dir])\n self.set_info(**info)\n\nclass numpy_info(system_info):\n 
section = 'numpy'\n modulename = 'Numeric'\n notfounderror = NumericNotFoundError\n\n def __init__(self):\n from distutils.sysconfig import get_python_inc\n include_dirs = []\n try:\n module = __import__(self.modulename)\n prefix = []\n for name in module.__file__.split(os.sep):\n if name=='lib':\n break\n prefix.append(name)\n include_dirs.append(get_python_inc(prefix=os.sep.join(prefix)))\n except ImportError:\n pass\n py_incl_dir = get_python_inc()\n include_dirs.append(py_incl_dir)\n for d in default_include_dirs:\n d = os.path.join(d, os.path.basename(py_incl_dir))\n if d not in include_dirs:\n include_dirs.append(d)\n system_info.__init__(self,\n default_lib_dirs=[],\n default_include_dirs=include_dirs)\n\n def calc_info(self):\n try:\n module = __import__(self.modulename)\n except ImportError:\n return\n info = {}\n macros = [(self.modulename.upper()+'_VERSION',\n '\"\\\\\"%s\\\\\"\"' % (module.__version__)),\n (self.modulename.upper(),None)]\n## try:\n## macros.append(\n## (self.modulename.upper()+'_VERSION_HEX',\n## hex(vstr2hex(module.__version__))),\n## )\n## except Exception,msg:\n## print msg\n dict_append(info, define_macros = macros)\n include_dirs = self.get_include_dirs()\n inc_dir = None\n for d in include_dirs:\n if self.combine_paths(d,\n os.path.join(self.modulename,\n 'arrayobject.h')):\n inc_dir = d\n break\n if inc_dir is not None:\n dict_append(info, include_dirs=[inc_dir])\n if info:\n self.set_info(**info)\n return\n\nclass numarray_info(numpy_info):\n section = 'numarray'\n modulename = 'numarray'\n\nclass boost_python_info(system_info):\n section = 'boost_python'\n dir_env_var = 'BOOST'\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['boost*']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n from distutils.sysconfig import get_python_inc\n src_dirs = self.get_src_dirs()\n src_dir = ''\n 
for d in src_dirs:\n if os.path.isfile(os.path.join(d,'libs','python','src','module.cpp')):\n src_dir = d\n break\n if not src_dir:\n return\n py_incl_dir = get_python_inc()\n srcs_dir = os.path.join(src_dir,'libs','python','src')\n bpl_srcs = glob(os.path.join(srcs_dir,'*.cpp'))\n bpl_srcs += glob(os.path.join(srcs_dir,'*','*.cpp'))\n info = {'libraries':[('boost_python_src',{'include_dirs':[src_dir,py_incl_dir],\n 'sources':bpl_srcs})],\n 'include_dirs':[src_dir],\n }\n if info:\n self.set_info(**info)\n return\n\nclass agg2_info(system_info):\n section = 'agg2'\n dir_env_var = 'AGG2'\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['agg2*']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'src','agg_affine_matrix.cpp')):\n src_dir = d\n break\n if not src_dir:\n return\n if sys.platform=='win32':\n agg2_srcs = glob(os.path.join(src_dir,'src','platform','win32','agg_win32_bmp.cpp'))\n else:\n agg2_srcs = glob(os.path.join(src_dir,'src','*.cpp'))\n agg2_srcs += [os.path.join(src_dir,'src','platform','X11','agg_platform_support.cpp')]\n \n info = {'libraries':[('agg2_src',{'sources':agg2_srcs,\n 'include_dirs':[os.path.join(src_dir,'include')],\n })],\n 'include_dirs':[os.path.join(src_dir,'include')],\n }\n if info:\n self.set_info(**info)\n return\n\nclass _pkg_config_info(system_info):\n section = None\n config_env_var = 'PKG_CONFIG'\n default_config_exe = 'pkg-config'\n append_config_exe = ''\n version_macro_name = None\n release_macro_name = None\n version_flag = '--modversion'\n cflags_flag = '--cflags'\n\n def get_config_exe(self):\n if os.environ.has_key(self.config_env_var):\n return os.environ[self.config_env_var]\n return self.default_config_exe\n def get_config_output(self, config_exe, option):\n s,o = 
exec_command(config_exe+' '+self.append_config_exe+' '+option,use_tee=0)\n if not s:\n return o\n\n def calc_info(self):\n config_exe = find_executable(self.get_config_exe())\n if not os.path.isfile(config_exe):\n print 'File not found: %s. Cannot determine %s info.' \\\n % (config_exe, self.section)\n return\n info = {}\n macros = []\n libraries = []\n library_dirs = []\n include_dirs = []\n extra_link_args = []\n extra_compile_args = []\n version = self.get_config_output(config_exe,self.version_flag)\n if version:\n macros.append((self.__class__.__name__.split('.')[-1].upper(),\n '\"\\\\\"%s\\\\\"\"' % (version)))\n if self.version_macro_name:\n macros.append((self.version_macro_name+'_%s' % (version.replace('.','_')),None))\n if self.release_macro_name:\n release = self.get_config_output(config_exe,'--release')\n if release:\n macros.append((self.release_macro_name+'_%s' % (release.replace('.','_')),None))\n opts = self.get_config_output(config_exe,'--libs')\n if opts:\n for opt in opts.split():\n if opt[:2]=='-l':\n libraries.append(opt[2:])\n elif opt[:2]=='-L':\n library_dirs.append(opt[2:])\n else:\n extra_link_args.append(opt)\n opts = self.get_config_output(config_exe,self.cflags_flag)\n if opts:\n for opt in opts.split():\n if opt[:2]=='-I':\n include_dirs.append(opt[2:])\n elif opt[:2]=='-D':\n if '=' in opt:\n n,v = opt[2:].split('=')\n macros.append((n,v))\n else:\n macros.append((opt[2:],None))\n else:\n extra_compile_args.append(opt)\n if macros: dict_append(info, define_macros = macros)\n if libraries: dict_append(info, libraries = libraries)\n if library_dirs: dict_append(info, library_dirs = library_dirs)\n if include_dirs: dict_append(info, include_dirs = include_dirs)\n if extra_link_args: dict_append(info, extra_link_args = extra_link_args)\n if extra_compile_args: dict_append(info, extra_compile_args = extra_compile_args)\n if info:\n self.set_info(**info)\n return\n\nclass wx_info(_pkg_config_info):\n section = 'wx'\n config_env_var = 
'WX_CONFIG'\n default_config_exe = 'wx-config'\n append_config_exe = ''\n version_macro_name = 'WX_VERSION'\n release_macro_name = 'WX_RELEASE'\n version_flag = '--version'\n cflags_flag = '--cxxflags'\n\nclass gdk_pixbuf_xlib_2_info(_pkg_config_info):\n section = 'gdk_pixbuf_xlib_2'\n append_config_exe = 'gdk-pixbuf-xlib-2.0'\n version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'\n\nclass gdk_pixbuf_2_info(_pkg_config_info):\n section = 'gdk_pixbuf_2'\n append_config_exe = 'gdk-pixbuf-2.0'\n version_macro_name = 'GDK_PIXBUF_VERSION'\n\nclass gdk_x11_2_info(_pkg_config_info):\n section = 'gdk_x11_2'\n append_config_exe = 'gdk-x11-2.0'\n version_macro_name = 'GDK_X11_VERSION'\n\nclass gdk_2_info(_pkg_config_info):\n section = 'gdk_2'\n append_config_exe = 'gdk-2.0'\n version_macro_name = 'GDK_VERSION'\n\nclass gdk_info(_pkg_config_info):\n section = 'gdk'\n append_config_exe = 'gdk'\n version_macro_name = 'GDK_VERSION'\n\nclass gtkp_x11_2_info(_pkg_config_info):\n section = 'gtkp_x11_2'\n append_config_exe = 'gtk+-x11-2.0'\n version_macro_name = 'GTK_X11_VERSION'\n\n\nclass gtkp_2_info(_pkg_config_info):\n section = 'gtkp_2'\n append_config_exe = 'gtk+-2.0'\n version_macro_name = 'GTK_VERSION'\n\nclass xft_info(_pkg_config_info):\n section = 'xft'\n append_config_exe = 'xft'\n version_macro_name = 'XFT_VERSION'\n\nclass freetype2_info(_pkg_config_info):\n section = 'freetype2'\n append_config_exe = 'freetype2'\n version_macro_name = 'FREETYPE2_VERSION'\n\n## def vstr2hex(version):\n## bits = []\n## n = [24,16,8,4,0]\n## r = 0\n## for s in version.split('.'):\n## r |= int(s) << n[0]\n## del n[0]\n## return r\n\n#--------------------------------------------------------------------\n\ndef combine_paths(*args,**kws):\n \"\"\" Return a list of existing paths composed by all combinations of\n items from arguments.\n \"\"\"\n r = []\n for a in args:\n if not a: continue\n if type(a) is types.StringType:\n a = [a]\n r.append(a)\n args = r\n if not args: return []\n if 
len(args)==1:\n result = reduce(lambda a,b:a+b,map(glob,args[0]),[])\n elif len (args)==2:\n result = []\n for a0 in args[0]:\n for a1 in args[1]:\n result.extend(glob(os.path.join(a0,a1)))\n else:\n result = combine_paths(*(combine_paths(args[0],args[1])+args[2:]))\n verbosity = kws.get('verbosity',1)\n if verbosity>1 and result:\n print '(','paths:',','.join(result),')'\n return result\n\nlanguage_map = {'c':0,'c++':1,'f77':2,'f90':3}\ninv_language_map = {0:'c',1:'c++',2:'f77',3:'f90'}\ndef dict_append(d,**kws):\n languages = []\n for k,v in kws.items():\n if k=='language':\n languages.append(v)\n continue\n if d.has_key(k):\n if k in ['library_dirs','include_dirs','define_macros']:\n [d[k].append(vv) for vv in v if vv not in d[k]]\n else:\n d[k].extend(v)\n else:\n d[k] = v\n if languages:\n l = inv_language_map[max([language_map.get(l,0) for l in languages])]\n d['language'] = l\n return\n\ndef show_all():\n import system_info\n import pprint\n match_info = re.compile(r'.*?_info').match\n show_only = []\n for n in sys.argv[1:]:\n if n[-5:] != '_info':\n n = n + '_info'\n show_only.append(n)\n show_all = not show_only\n for n in filter(match_info,dir(system_info)):\n if n in ['system_info','get_info']: continue\n if not show_all:\n if n not in show_only: continue\n del show_only[show_only.index(n)]\n c = getattr(system_info,n)()\n c.verbosity = 2\n r = c.get_info()\n if show_only:\n print 'Info classes not defined:',','.join(show_only)\nif __name__ == \"__main__\":\n show_all()\n", "source_code_before": "#!/usr/bin/env python\n\"\"\"\nThis file defines a set of system_info classes for getting\ninformation about various resources (libraries, library directories,\ninclude directories, etc.) in the system. 
Currently, the following\nclasses are available:\n\n atlas_info\n atlas_threads_info\n atlas_blas_info\n atlas_blas_threads_info\n lapack_atlas_info\n blas_info\n lapack_info\n blas_opt_info # usage recommended\n lapack_opt_info # usage recommended\n fftw_info,dfftw_info,sfftw_info\n fftw_threads_info,dfftw_threads_info,sfftw_threads_info\n djbfft_info\n x11_info\n lapack_src_info\n blas_src_info\n numpy_info\n numarray_info\n boost_python_info\n agg2_info\n wx_info\n gdk_pixbuf_xlib_2_info\n gdk_pixbuf_2_info\n gdk_x11_2_info\n gtkp_x11_2_info\n gtkp_2_info\n xft_info\n freetype2_info\n\nUsage:\n info_dict = get_info()\n where is a string 'atlas','x11','fftw','lapack','blas',\n 'lapack_src', 'blas_src', etc. For a complete list of allowed names,\n see the definition of get_info() function below.\n\n Returned info_dict is a dictionary which is compatible with\n distutils.setup keyword arguments. If info_dict == {}, then the\n asked resource is not available (system_info could not find it).\n\n Several *_info classes specify an environment variable to specify\n the locations of software. When setting the corresponding environment\n variable to 'None' then the software will be ignored, even when it\n is available in system.\n\nGlobal parameters:\n system_info.search_static_first - search static libraries (.a)\n in precedence to shared ones (.so, .sl) if enabled.\n system_info.verbosity - output the results to stdout if enabled.\n\nThe file 'site.cfg' in the same directory as this module is read\nfor configuration options. The format is that used by ConfigParser (i.e.,\nWindows .INI style). The section DEFAULT has options that are the default\nfor each section. The available sections are fftw, atlas, and x11. Appropiate\ndefaults are used if nothing is specified.\n\nThe order of finding the locations of resources is the following:\n 1. environment variable\n 2. section in site.cfg\n 3. 
DEFAULT section in site.cfg\nOnly the first complete match is returned.\n\nExample:\n----------\n[DEFAULT]\nlibrary_dirs = /usr/lib:/usr/local/lib:/opt/lib\ninclude_dirs = /usr/include:/usr/local/include:/opt/include\nsrc_dirs = /usr/local/src:/opt/src\n# search static libraries (.a) in preference to shared ones (.so)\nsearch_static_first = 0\n\n[fftw]\nfftw_libs = rfftw, fftw\nfftw_opt_libs = rfftw_threaded, fftw_threaded\n# if the above aren't found, look for {s,d}fftw_libs and {s,d}fftw_opt_libs\n\n[atlas]\nlibrary_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas\n# for overriding the names of the atlas libraries\natlas_libs = lapack, f77blas, cblas, atlas\n\n[x11]\nlibrary_dirs = /usr/X11R6/lib\ninclude_dirs = /usr/X11R6/include\n----------\n\nAuthors:\n Pearu Peterson , February 2002\n David M. Cooke , April 2002\n\nCopyright 2002 Pearu Peterson all rights reserved,\nPearu Peterson \nPermission to use, modify, and distribute this software is given under the \nterms of the SciPy (BSD style) license. See LICENSE.txt that came with\nthis distribution for specifics.\n\nNO WARRANTY IS EXPRESSED OR IMPLIED. 
USE AT YOUR OWN RISK.\n\"\"\"\n\n__revision__ = '$Id$'\nimport sys,os,re,types\nimport warnings\nfrom distutils.errors import DistutilsError\nfrom glob import glob\nimport ConfigParser\nfrom exec_command import find_executable, exec_command\n\nfrom distutils.sysconfig import get_config_vars\n\nif sys.platform == 'win32':\n default_lib_dirs = ['C:\\\\'] # probably not very helpful...\n default_include_dirs = []\n default_src_dirs = ['.']\n default_x11_lib_dirs = []\n default_x11_include_dirs = []\nelse:\n default_lib_dirs = ['/usr/local/lib', '/opt/lib', '/usr/lib',\n '/sw/lib']\n default_include_dirs = ['/usr/local/include',\n '/opt/include', '/usr/include',\n '/sw/include']\n default_src_dirs = ['.','/usr/local/src', '/opt/src','/sw/src']\n default_x11_lib_dirs = ['/usr/X11R6/lib','/usr/X11/lib','/usr/lib']\n default_x11_include_dirs = ['/usr/X11R6/include','/usr/X11/include',\n '/usr/include']\n\nif os.path.join(sys.prefix, 'lib') not in default_lib_dirs:\n default_lib_dirs.insert(0,os.path.join(sys.prefix, 'lib'))\n default_include_dirs.append(os.path.join(sys.prefix, 'include'))\n default_src_dirs.append(os.path.join(sys.prefix, 'src'))\n\ndefault_lib_dirs = filter(os.path.isdir, default_lib_dirs)\ndefault_include_dirs = filter(os.path.isdir, default_include_dirs)\ndefault_src_dirs = filter(os.path.isdir, default_src_dirs)\n\nso_ext = get_config_vars('SO')[0] or ''\n\ndef get_info(name,notfound_action=0):\n \"\"\"\n notfound_action:\n 0 - do nothing\n 1 - display warning message\n 2 - raise error\n \"\"\"\n cl = {'atlas':atlas_info, # use lapack_opt or blas_opt instead\n 'atlas_threads':atlas_threads_info, # ditto\n 'atlas_blas':atlas_blas_info,\n 'atlas_blas_threads':atlas_blas_threads_info,\n 'lapack_atlas':lapack_atlas_info, # use lapack_opt instead\n 'lapack_atlas_threads':lapack_atlas_threads_info, # ditto\n 'x11':x11_info,\n 'fftw':fftw_info,\n 'dfftw':dfftw_info,\n 'sfftw':sfftw_info,\n 'fftw_threads':fftw_threads_info,\n 
'dfftw_threads':dfftw_threads_info,\n 'sfftw_threads':sfftw_threads_info,\n 'djbfft':djbfft_info,\n 'blas':blas_info, # use blas_opt instead\n 'lapack':lapack_info, # use lapack_opt instead\n 'lapack_src':lapack_src_info,\n 'blas_src':blas_src_info,\n 'numpy':numpy_info,\n 'numarray':numarray_info,\n 'lapack_opt':lapack_opt_info,\n 'blas_opt':blas_opt_info,\n 'boost_python':boost_python_info,\n 'agg2':agg2_info,\n 'wx':wx_info,\n 'gdk_pixbuf_xlib_2':gdk_pixbuf_xlib_2_info,\n 'gdk-pixbuf-xlib-2.0':gdk_pixbuf_xlib_2_info,\n 'gdk_pixbuf_2':gdk_pixbuf_2_info,\n 'gdk-pixbuf-2.0':gdk_pixbuf_2_info,\n 'gdk':gdk_info,\n 'gdk_2':gdk_2_info,\n 'gdk-2.0':gdk_2_info,\n 'gdk_x11_2':gdk_x11_2_info,\n 'gdk-x11-2.0':gdk_x11_2_info,\n 'gtkp_x11_2':gtkp_x11_2_info,\n 'gtk+-x11-2.0':gtkp_x11_2_info,\n 'gtkp_2':gtkp_2_info,\n 'gtk+-2.0':gtkp_2_info,\n 'xft':xft_info,\n 'freetype2':freetype2_info,\n }.get(name.lower(),system_info)\n return cl().get_info(notfound_action)\n\nclass NotFoundError(DistutilsError):\n \"\"\"Some third-party program or library is not found.\"\"\"\n\nclass AtlasNotFoundError(NotFoundError):\n \"\"\"\n Atlas (http://math-atlas.sourceforge.net/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [atlas]) or by setting\n the ATLAS environment variable.\"\"\"\n\nclass LapackNotFoundError(NotFoundError):\n \"\"\"\n Lapack (http://www.netlib.org/lapack/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [lapack]) or by setting\n the LAPACK environment variable.\"\"\"\n\nclass LapackSrcNotFoundError(LapackNotFoundError):\n \"\"\"\n Lapack (http://www.netlib.org/lapack/) sources not found.\n Directories to search for the sources can be specified in the\n scipy_distutils/site.cfg file (section [lapack_src]) or by setting\n the LAPACK_SRC environment variable.\"\"\"\n\nclass BlasNotFoundError(NotFoundError):\n \"\"\"\n 
Blas (http://www.netlib.org/blas/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [blas]) or by setting\n the BLAS environment variable.\"\"\"\n\nclass BlasSrcNotFoundError(BlasNotFoundError):\n \"\"\"\n Blas (http://www.netlib.org/blas/) sources not found.\n Directories to search for the sources can be specified in the\n scipy_distutils/site.cfg file (section [blas_src]) or by setting\n the BLAS_SRC environment variable.\"\"\"\n\nclass FFTWNotFoundError(NotFoundError):\n \"\"\"\n FFTW (http://www.fftw.org/) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [fftw]) or by setting\n the FFTW environment variable.\"\"\"\n\nclass DJBFFTNotFoundError(NotFoundError):\n \"\"\"\n DJBFFT (http://cr.yp.to/djbfft.html) libraries not found.\n Directories to search for the libraries can be specified in the\n scipy_distutils/site.cfg file (section [djbfft]) or by setting\n the DJBFFT environment variable.\"\"\"\n\nclass F2pyNotFoundError(NotFoundError):\n \"\"\"\n f2py2e (http://cens.ioc.ee/projects/f2py2e/) module not found.\n Get it from above location, install it, and retry setup.py.\"\"\"\n\nclass NumericNotFoundError(NotFoundError):\n \"\"\"\n Numeric (http://www.numpy.org/) module not found.\n Get it from above location, install it, and retry setup.py.\"\"\"\n\nclass X11NotFoundError(NotFoundError):\n \"\"\"X11 libraries not found.\"\"\"\n\nclass system_info:\n\n \"\"\" get_info() is the only public method. 
Don't use others.\n \"\"\"\n section = 'DEFAULT'\n dir_env_var = None\n search_static_first = 0 # XXX: disabled by default, may disappear in\n # future unless it is proved to be useful.\n verbosity = 1\n saved_results = {}\n\n notfounderror = NotFoundError\n\n def __init__ (self,\n default_lib_dirs=default_lib_dirs,\n default_include_dirs=default_include_dirs,\n verbosity = 1,\n ):\n self.__class__.info = {}\n self.local_prefixes = []\n defaults = {}\n defaults['libraries'] = ''\n defaults['library_dirs'] = os.pathsep.join(default_lib_dirs)\n defaults['include_dirs'] = os.pathsep.join(default_include_dirs)\n defaults['src_dirs'] = os.pathsep.join(default_src_dirs)\n defaults['search_static_first'] = str(self.search_static_first)\n self.cp = ConfigParser.ConfigParser(defaults)\n try:\n f = __file__\n except NameError,msg:\n f = sys.argv[0]\n cf = os.path.join(os.path.split(os.path.abspath(f))[0],\n 'site.cfg')\n self.cp.read([cf])\n if not self.cp.has_section(self.section):\n self.cp.add_section(self.section)\n self.search_static_first = self.cp.getboolean(self.section,\n 'search_static_first')\n assert isinstance(self.search_static_first, type(0))\n\n def calc_libraries_info(self):\n libs = self.get_libraries()\n dirs = self.get_lib_dirs()\n info = {}\n for lib in libs:\n i = None\n for d in dirs:\n i = self.check_libs(d,[lib]) \n if i is not None:\n break\n if i is not None:\n dict_append(info,**i)\n else:\n print 'Library %s was not found. 
Ignoring' % (lib)\n return info\n\n def set_info(self,**info):\n if info: \n lib_info = self.calc_libraries_info()\n dict_append(info,**lib_info)\n self.saved_results[self.__class__.__name__] = info\n\n def has_info(self):\n return self.saved_results.has_key(self.__class__.__name__)\n\n def get_info(self,notfound_action=0):\n \"\"\" Return a dictonary with items that are compatible\n with scipy_distutils.setup keyword arguments.\n \"\"\"\n flag = 0\n if not self.has_info():\n flag = 1\n if self.verbosity>0:\n print self.__class__.__name__ + ':'\n if hasattr(self, 'calc_info'):\n self.calc_info()\n if notfound_action:\n if not self.has_info():\n if notfound_action==1:\n warnings.warn(self.notfounderror.__doc__)\n elif notfound_action==2:\n raise self.notfounderror,self.notfounderror.__doc__\n else:\n raise ValueError,`notfound_action`\n\n if self.verbosity>0:\n if not self.has_info():\n print ' NOT AVAILABLE'\n self.set_info()\n else:\n print ' FOUND:'\n \n res = self.saved_results.get(self.__class__.__name__)\n if self.verbosity>0 and flag:\n for k,v in res.items():\n v = str(v)\n if k=='sources' and len(v)>200: v = v[:60]+' ...\\n... 
'+v[-60:]\n print ' %s = %s'%(k,v)\n print\n \n return res\n\n def get_paths(self, section, key):\n dirs = self.cp.get(section, key).split(os.pathsep)\n env_var = self.dir_env_var\n if env_var:\n if type(env_var) is type([]):\n e0 = env_var[-1]\n for e in env_var:\n if os.environ.has_key(e):\n e0 = e\n break\n if not env_var[0]==e0:\n print 'Setting %s=%s' % (env_var[0],e0)\n env_var = e0\n if env_var and os.environ.has_key(env_var):\n d = os.environ[env_var]\n if d=='None':\n print 'Disabled',self.__class__.__name__,'(%s is None)' \\\n % (self.dir_env_var)\n return []\n if os.path.isfile(d):\n dirs = [os.path.dirname(d)] + dirs\n l = getattr(self,'_lib_names',[])\n if len(l)==1:\n b = os.path.basename(d)\n b = os.path.splitext(b)[0]\n if b[:3]=='lib':\n print 'Replacing _lib_names[0]==%r with %r' \\\n % (self._lib_names[0], b[3:])\n self._lib_names[0] = b[3:]\n else:\n ds = d.split(os.pathsep)\n ds2 = []\n for d in ds:\n if os.path.isdir(d):\n ds2.append(d)\n for dd in ['include','lib']:\n d1 = os.path.join(d,dd)\n if os.path.isdir(d1):\n ds2.append(d1)\n dirs = ds2 + dirs\n default_dirs = self.cp.get('DEFAULT', key).split(os.pathsep)\n dirs.extend(default_dirs)\n ret = []\n [ret.append(d) for d in dirs if os.path.isdir(d) and d not in ret]\n if self.verbosity>1:\n print '(',key,'=',':'.join(ret),')'\n return ret\n\n def get_lib_dirs(self, key='library_dirs'):\n return self.get_paths(self.section, key)\n\n def get_include_dirs(self, key='include_dirs'):\n return self.get_paths(self.section, key)\n\n def get_src_dirs(self, key='src_dirs'):\n return self.get_paths(self.section, key)\n\n def get_libs(self, key, default):\n try:\n libs = self.cp.get(self.section, key)\n except ConfigParser.NoOptionError:\n if not default:\n return []\n if type(default) is type(''):\n return [default]\n return default\n return [b for b in [a.strip() for a in libs.split(',')] if b]\n\n def get_libraries(self, key='libraries'):\n return self.get_libs(key,'')\n\n def 
check_libs(self,lib_dir,libs,opt_libs =[]):\n \"\"\" If static or shared libraries are available then return\n their info dictionary. \"\"\"\n if self.search_static_first:\n exts = ['.a',so_ext]\n else:\n exts = [so_ext,'.a']\n if sys.platform=='cygwin':\n exts.append('.dll.a')\n for ext in exts:\n info = self._check_libs(lib_dir,libs,opt_libs,ext)\n if info is not None: return info\n return\n\n def _lib_list(self, lib_dir, libs, ext):\n assert type(lib_dir) is type('')\n liblist = []\n for l in libs:\n p = self.combine_paths(lib_dir, 'lib'+l+ext)\n if p:\n assert len(p)==1\n liblist.append(p[0])\n return liblist\n\n def _extract_lib_names(self,libs):\n return [os.path.splitext(os.path.basename(p))[0][3:] \\\n for p in libs]\n\n def _check_libs(self,lib_dir,libs, opt_libs, ext):\n found_libs = self._lib_list(lib_dir, libs, ext)\n if len(found_libs) == len(libs):\n found_libs = self._extract_lib_names(found_libs)\n info = {'libraries' : found_libs, 'library_dirs' : [lib_dir]}\n opt_found_libs = self._lib_list(lib_dir, opt_libs, ext)\n if len(opt_found_libs) == len(opt_libs):\n opt_found_libs = self._extract_lib_names(opt_found_libs)\n info['libraries'].extend(opt_found_libs)\n return info\n\n def combine_paths(self,*args):\n return combine_paths(*args,**{'verbosity':self.verbosity})\n\nclass fftw_info(system_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['rfftw', 'fftw']\n includes = ['fftw.h','rfftw.h']\n macros = [('SCIPY_FFTW_H',None)]\n notfounderror = FFTWNotFoundError\n\n def __init__(self):\n system_info.__init__(self)\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n incl_dirs = self.get_include_dirs()\n incl_dir = None\n libs = self.get_libs(self.section+'_libs', self.libs)\n info = None\n for d in lib_dirs:\n r = self.check_libs(d,libs)\n if r is not None:\n info = r\n break\n if info is not None:\n flag = 0\n for d in incl_dirs:\n if len(self.combine_paths(d,self.includes))==2:\n dict_append(info,include_dirs=[d])\n flag = 1\n 
incl_dirs = [d]\n incl_dir = d\n break\n if flag:\n dict_append(info,define_macros=self.macros)\n else:\n info = None\n if info is not None:\n self.set_info(**info)\n\nclass dfftw_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['drfftw','dfftw']\n includes = ['dfftw.h','drfftw.h']\n macros = [('SCIPY_DFFTW_H',None)]\n\nclass sfftw_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['srfftw','sfftw']\n includes = ['sfftw.h','srfftw.h']\n macros = [('SCIPY_SFFTW_H',None)]\n\nclass fftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['rfftw_threads','fftw_threads']\n includes = ['fftw_threads.h','rfftw_threads.h']\n macros = [('SCIPY_FFTW_THREADS_H',None)]\n\nclass dfftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['drfftw_threads','dfftw_threads']\n includes = ['dfftw_threads.h','drfftw_threads.h']\n macros = [('SCIPY_DFFTW_THREADS_H',None)]\n\nclass sfftw_threads_info(fftw_info):\n section = 'fftw'\n dir_env_var = 'FFTW'\n libs = ['srfftw_threads','sfftw_threads']\n includes = ['sfftw_threads.h','srfftw_threads.h']\n macros = [('SCIPY_SFFTW_THREADS_H',None)]\n\nclass djbfft_info(system_info):\n section = 'djbfft'\n dir_env_var = 'DJBFFT'\n notfounderror = DJBFFTNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend(self.combine_paths(d,['djbfft'])+[d])\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n incl_dirs = self.get_include_dirs()\n info = None\n for d in lib_dirs:\n p = self.combine_paths (d,['djbfft.a'])\n if p:\n info = {'extra_objects':p}\n break\n p = self.combine_paths (d,['libdjbfft.a'])\n if p:\n info = {'libraries':['djbfft'],'library_dirs':[d]}\n break\n if info is None:\n return\n for d in incl_dirs:\n if len(self.combine_paths(d,['fftc8.h','fftfreq.h']))==2:\n 
dict_append(info,include_dirs=[d],\n define_macros=[('SCIPY_DJBFFT_H',None)])\n self.set_info(**info)\n return\n return\n\nclass atlas_info(system_info):\n section = 'atlas'\n dir_env_var = 'ATLAS'\n _lib_names = ['f77blas','cblas']\n if sys.platform[:7]=='freebsd':\n _lib_atlas = ['atlas_r']\n _lib_lapack = ['alapack_r']\n else:\n _lib_atlas = ['atlas']\n _lib_lapack = ['lapack']\n\n notfounderror = AtlasNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend(self.combine_paths(d,['atlas*','ATLAS*',\n 'sse','3dnow','sse2'])+[d])\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n info = {}\n atlas_libs = self.get_libs('atlas_libs',\n self._lib_names + self._lib_atlas)\n lapack_libs = self.get_libs('lapack_libs',self._lib_lapack)\n atlas = None\n lapack = None\n atlas_1 = None\n for d in lib_dirs:\n atlas = self.check_libs(d,atlas_libs,[])\n lapack_atlas = self.check_libs(d,['lapack_atlas'],[])\n if atlas is not None:\n lib_dirs2 = self.combine_paths(d,['atlas*','ATLAS*'])+[d]\n for d2 in lib_dirs2:\n lapack = self.check_libs(d2,lapack_libs,[])\n if lapack is not None:\n break\n else:\n lapack = None\n if lapack is not None:\n break\n if atlas:\n atlas_1 = atlas\n print self.__class__\n if atlas is None:\n atlas = atlas_1\n if atlas is None:\n return\n include_dirs = self.get_include_dirs()\n h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]\n if h:\n h = os.path.dirname(h)\n dict_append(info,include_dirs=[h])\n info['language'] = 'c'\n if lapack is not None:\n dict_append(info,**lapack)\n dict_append(info,**atlas)\n elif 'lapack_atlas' in atlas['libraries']:\n dict_append(info,**atlas)\n dict_append(info,define_macros=[('ATLAS_WITH_LAPACK_ATLAS',None)])\n self.set_info(**info)\n return\n else:\n dict_append(info,**atlas)\n dict_append(info,define_macros=[('ATLAS_WITHOUT_LAPACK',None)])\n 
message = \"\"\"\n*********************************************************************\n Could not find lapack library within the ATLAS installation.\n*********************************************************************\n\"\"\"\n warnings.warn(message)\n self.set_info(**info)\n return\n \n # Check if lapack library is complete, only warn if it is not.\n lapack_dir = lapack['library_dirs'][0]\n lapack_name = lapack['libraries'][0]\n lapack_lib = None\n for e in ['.a',so_ext]:\n fn = os.path.join(lapack_dir,'lib'+lapack_name+e)\n if os.path.exists(fn):\n lapack_lib = fn\n break\n if lapack_lib is not None:\n sz = os.stat(lapack_lib)[6]\n if sz <= 4000*1024:\n message = \"\"\"\n*********************************************************************\n Lapack library (from ATLAS) is probably incomplete:\n size of %s is %sk (expected >4000k)\n\n Follow the instructions in the KNOWN PROBLEMS section of the file\n scipy/INSTALL.txt.\n*********************************************************************\n\"\"\" % (lapack_lib,sz/1024)\n warnings.warn(message)\n else:\n info['language'] = 'f77'\n\n self.set_info(**info)\n\nclass atlas_blas_info(atlas_info):\n _lib_names = ['f77blas','cblas']\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n info = {}\n atlas_libs = self.get_libs('atlas_libs',\n self._lib_names + self._lib_atlas)\n atlas = None\n for d in lib_dirs:\n atlas = self.check_libs(d,atlas_libs,[])\n if atlas is not None:\n break\n if atlas is None:\n return\n include_dirs = self.get_include_dirs()\n h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]\n if h:\n h = os.path.dirname(h)\n dict_append(info,include_dirs=[h])\n info['language'] = 'c'\n\n dict_append(info,**atlas)\n\n self.set_info(**info)\n return\n\n\nclass atlas_threads_info(atlas_info):\n dir_env_var = ['PTATLAS','ATLAS']\n _lib_names = ['ptf77blas','ptcblas']\n\nclass atlas_blas_threads_info(atlas_blas_info):\n dir_env_var = ['PTATLAS','ATLAS']\n _lib_names = 
['ptf77blas','ptcblas']\n\nclass lapack_atlas_info(atlas_info):\n _lib_names = ['lapack_atlas'] + atlas_info._lib_names\n\nclass lapack_atlas_threads_info(atlas_threads_info):\n _lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names\n\nclass lapack_info(system_info):\n section = 'lapack'\n dir_env_var = 'LAPACK'\n _lib_names = ['lapack']\n notfounderror = LapackNotFoundError\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n\n lapack_libs = self.get_libs('lapack_libs', self._lib_names)\n for d in lib_dirs:\n lapack = self.check_libs(d,lapack_libs,[])\n if lapack is not None:\n info = lapack \n break\n else:\n return\n info['language'] = 'f77'\n self.set_info(**info)\n\nclass lapack_src_info(system_info):\n section = 'lapack_src'\n dir_env_var = 'LAPACK_SRC'\n notfounderror = LapackSrcNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['LAPACK*/SRC','SRC']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'dgesv.f')):\n src_dir = d\n break\n if not src_dir:\n #XXX: Get sources from netlib. 
May be ask first.\n return\n # The following is extracted from LAPACK-3.0/SRC/Makefile\n allaux='''\n ilaenv ieeeck lsame lsamen xerbla\n ''' # *.f\n laux = '''\n bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1\n laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2\n lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre\n larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4\n lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1\n lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf\n stebz stedc steqr sterf\n ''' # [s|d]*.f\n lasrc = '''\n gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak\n gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv\n gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2\n geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd\n gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal\n gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd\n ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein\n hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0\n lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb\n lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp\n laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv\n lartv larz larzb larzt laswp lasyf latbs latdf latps latrd\n latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv\n pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2\n potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri\n pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs\n spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv\n sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2\n tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs\n trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs\n tzrqf tzrzf\n ''' # [s|c|d|z]*.f\n sd_lasrc = '''\n laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l\n org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr\n orm2l 
orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3\n ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx\n sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd\n stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd\n sygvx sytd2 sytrd\n ''' # [s|d]*.f\n cz_lasrc = '''\n bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev\n heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv\n hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd\n hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf\n hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7\n laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe\n laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv\n spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq\n ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2\n unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr\n ''' # [c|z]*.f\n #######\n sclaux = laux + ' econd ' # s*.f\n dzlaux = laux + ' secnd ' # d*.f\n slasrc = lasrc + sd_lasrc # s*.f\n dlasrc = lasrc + sd_lasrc # d*.f\n clasrc = lasrc + cz_lasrc + ' srot srscl ' # c*.f\n zlasrc = lasrc + cz_lasrc + ' drot drscl ' # z*.f\n oclasrc = ' icmax1 scsum1 ' # *.f\n ozlasrc = ' izmax1 dzsum1 ' # *.f\n sources = ['s%s.f'%f for f in (sclaux+slasrc).split()] \\\n + ['d%s.f'%f for f in (dzlaux+dlasrc).split()] \\\n + ['c%s.f'%f for f in (clasrc).split()] \\\n + ['z%s.f'%f for f in (zlasrc).split()] \\\n + ['%s.f'%f for f in (allaux+oclasrc+ozlasrc).split()]\n sources = [os.path.join(src_dir,f) for f in sources]\n #XXX: should we check here actual existence of source files?\n info = {'sources':sources,'language':'f77'}\n self.set_info(**info)\n\natlas_version_c_text = r'''\n/* This file is generated from scipy_distutils/system_info.py */\n#ifdef __CPLUSPLUS__\nextern \"C\" {\n#endif\n#include \"Python.h\"\nstatic PyMethodDef module_methods[] = { {NULL,NULL} };\nDL_EXPORT(void) initatlas_version(void) {\n void 
ATL_buildinfo(void);\n ATL_buildinfo();\n Py_InitModule(\"atlas_version\", module_methods);\n}\n#ifdef __CPLUSCPLUS__\n}\n#endif\n'''\n\ndef get_atlas_version(**config):\n from core import Extension, setup\n from misc_util import get_build_temp\n import log\n magic = hex(hash(`config`))\n def atlas_version_c(extension, build_dir,magic=magic):\n source = os.path.join(build_dir,'atlas_version_%s.c' % (magic))\n if os.path.isfile(source):\n from distutils.dep_util import newer\n if newer(source,__file__):\n return source\n f = open(source,'w')\n f.write(atlas_version_c_text)\n f.close()\n return source\n ext = Extension('atlas_version',\n sources=[atlas_version_c],\n **config)\n extra_args = ['--build-lib',get_build_temp()]\n for a in sys.argv:\n if re.match('[-][-]compiler[=]',a):\n extra_args.append(a)\n try:\n dist = setup(ext_modules=[ext],\n script_name = 'get_atlas_version',\n script_args = ['build_src','build_ext']+extra_args)\n except Exception,msg:\n print \"##### msg: %s\" % msg\n if not msg:\n msg = \"Unknown Exception\"\n log.warn(msg)\n return None\n\n from distutils.sysconfig import get_config_var\n so_ext = get_config_var('SO')\n build_ext = dist.get_command_obj('build_ext')\n target = os.path.join(build_ext.build_lib,'atlas_version'+so_ext)\n from exec_command import exec_command,get_pythonexe\n cmd = [get_pythonexe(),'-c',\n '\"import imp;imp.load_dynamic(\\\\\"atlas_version\\\\\",\\\\\"%s\\\\\")\"'\\\n % (os.path.basename(target))]\n s,o = exec_command(cmd,execute_in=os.path.dirname(target),use_tee=0)\n atlas_version = None\n if not s:\n m = re.search(r'ATLAS version (?P\\d+[.]\\d+[.]\\d+)',o)\n if m:\n atlas_version = m.group('version')\n if atlas_version is None:\n if re.search(r'undefined symbol: ATL_buildinfo',o,re.M):\n atlas_version = '3.2.1_pre3.3.6'\n else:\n print 'Command:',' '.join(cmd)\n print 'Status:',s\n print 'Output:',o\n return atlas_version\n\n\nclass lapack_opt_info(system_info):\n \n def calc_info(self):\n\n if 
sys.platform=='darwin' and not os.environ.get('ATLAS',None):\n args = []\n link_args = []\n if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):\n args.extend(['-faltivec','-framework','Accelerate'])\n link_args.extend(['-Wl,-framework','-Wl,Accelerate'])\n elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):\n args.extend(['-faltivec','-framework','vecLib'])\n link_args.extend(['-Wl,-framework','-Wl,vecLib'])\n if args:\n self.set_info(extra_compile_args=args,\n extra_link_args=link_args,\n define_macros=[('NO_ATLAS_INFO',3)])\n return\n\n atlas_info = get_info('atlas_threads')\n if not atlas_info:\n atlas_info = get_info('atlas')\n #atlas_info = {} ## uncomment for testing\n atlas_version = None\n need_lapack = 0\n need_blas = 0\n info = {}\n if atlas_info:\n version_info = atlas_info.copy()\n atlas_version = get_atlas_version(**version_info)\n if not atlas_info.has_key('define_macros'):\n atlas_info['define_macros'] = []\n if atlas_version is None:\n atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))\n else:\n atlas_info['define_macros'].append(('ATLAS_INFO',\n '\"\\\\\"%s\\\\\"\"' % atlas_version))\n\t\tif atlas_version=='3.2.1_pre3.3.6':\n\t\t atlas_info['define_macros'].append(('NO_ATLAS_INFO',4))\n l = atlas_info.get('define_macros',[])\n if ('ATLAS_WITH_LAPACK_ATLAS',None) in l \\\n or ('ATLAS_WITHOUT_LAPACK',None) in l:\n need_lapack = 1\n info = atlas_info\n else:\n warnings.warn(AtlasNotFoundError.__doc__)\n need_blas = 1\n need_lapack = 1\n dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])\n\n if need_lapack:\n lapack_info = get_info('lapack')\n #lapack_info = {} ## uncomment for testing\n if lapack_info:\n dict_append(info,**lapack_info)\n else:\n warnings.warn(LapackNotFoundError.__doc__)\n lapack_src_info = get_info('lapack_src')\n if not lapack_src_info:\n warnings.warn(LapackSrcNotFoundError.__doc__)\n return\n dict_append(info,libraries=[('flapack_src',lapack_src_info)])\n\n if need_blas:\n blas_info = 
get_info('blas')\n #blas_info = {} ## uncomment for testing\n if blas_info:\n dict_append(info,**blas_info)\n else:\n warnings.warn(BlasNotFoundError.__doc__)\n blas_src_info = get_info('blas_src')\n if not blas_src_info:\n warnings.warn(BlasSrcNotFoundError.__doc__)\n return\n dict_append(info,libraries=[('fblas_src',blas_src_info)])\n\n self.set_info(**info)\n return\n\n\nclass blas_opt_info(system_info):\n \n def calc_info(self):\n\n if sys.platform=='darwin' and not os.environ.get('ATLAS',None):\n args = []\n link_args = []\n if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):\n args.extend(['-faltivec','-framework','Accelerate'])\n link_args.extend(['-Wl,-framework','-Wl,Accelerate'])\n elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):\n args.extend(['-faltivec','-framework','vecLib'])\n link_args.extend(['-Wl,-framework','-Wl,vecLib'])\n if args:\n self.set_info(extra_compile_args=args,\n extra_link_args=link_args,\n define_macros=[('NO_ATLAS_INFO',3)])\n return\n\n atlas_info = get_info('atlas_blas_threads')\n if not atlas_info:\n atlas_info = get_info('atlas_blas')\n atlas_version = None\n need_blas = 0\n info = {}\n if atlas_info:\n version_info = atlas_info.copy()\n atlas_version = get_atlas_version(**version_info)\n if not atlas_info.has_key('define_macros'):\n atlas_info['define_macros'] = []\n if atlas_version is None:\n atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))\n else:\n atlas_info['define_macros'].append(('ATLAS_INFO',\n '\"\\\\\"%s\\\\\"\"' % atlas_version))\n info = atlas_info\n else:\n warnings.warn(AtlasNotFoundError.__doc__)\n need_blas = 1\n dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])\n\n if need_blas:\n blas_info = get_info('blas')\n if blas_info:\n dict_append(info,**blas_info)\n else:\n warnings.warn(BlasNotFoundError.__doc__)\n blas_src_info = get_info('blas_src')\n if not blas_src_info:\n warnings.warn(BlasSrcNotFoundError.__doc__)\n return\n 
dict_append(info,libraries=[('fblas_src',blas_src_info)])\n\n self.set_info(**info)\n return\n\n\nclass blas_info(system_info):\n section = 'blas'\n dir_env_var = 'BLAS'\n _lib_names = ['blas']\n notfounderror = BlasNotFoundError\n\n def calc_info(self):\n lib_dirs = self.get_lib_dirs()\n\n blas_libs = self.get_libs('blas_libs', self._lib_names)\n for d in lib_dirs:\n blas = self.check_libs(d,blas_libs,[])\n if blas is not None:\n info = blas \n break\n else:\n return\n info['language'] = 'f77' # XXX: is it generally true?\n self.set_info(**info)\n\n\nclass blas_src_info(system_info):\n section = 'blas_src'\n dir_env_var = 'BLAS_SRC'\n notfounderror = BlasSrcNotFoundError\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['blas']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'daxpy.f')):\n src_dir = d\n break\n if not src_dir:\n #XXX: Get sources from netlib. 
May be ask first.\n return\n blas1 = '''\n caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot\n dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2\n srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg\n dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax\n snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap\n '''\n blas2 = '''\n cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv\n chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv\n dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv\n sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger\n stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc\n zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2\n ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv\n '''\n blas3 = '''\n cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k\n dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm\n ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm\n '''\n sources = [os.path.join(src_dir,f+'.f') \\\n for f in (blas1+blas2+blas3).split()]\n #XXX: should we check here actual existence of source files?\n info = {'sources':sources,'language':'f77'}\n self.set_info(**info)\n\nclass x11_info(system_info):\n section = 'x11'\n notfounderror = X11NotFoundError\n\n def __init__(self):\n system_info.__init__(self,\n default_lib_dirs=default_x11_lib_dirs,\n default_include_dirs=default_x11_include_dirs)\n\n def calc_info(self):\n if sys.platform in ['win32']:\n return\n lib_dirs = self.get_lib_dirs()\n include_dirs = self.get_include_dirs()\n x11_libs = self.get_libs('x11_libs', ['X11'])\n for lib_dir in lib_dirs:\n info = self.check_libs(lib_dir, x11_libs, [])\n if info is not None:\n break\n else:\n return\n inc_dir = None\n for d in include_dirs:\n if self.combine_paths(d, 'X11/X.h'):\n inc_dir = d\n break\n if inc_dir is not None:\n dict_append(info, include_dirs=[inc_dir])\n self.set_info(**info)\n\nclass numpy_info(system_info):\n 
section = 'numpy'\n modulename = 'Numeric'\n notfounderror = NumericNotFoundError\n\n def __init__(self):\n from distutils.sysconfig import get_python_inc\n include_dirs = []\n try:\n module = __import__(self.modulename)\n prefix = []\n for name in module.__file__.split(os.sep):\n if name=='lib':\n break\n prefix.append(name)\n include_dirs.append(get_python_inc(prefix=os.sep.join(prefix)))\n except ImportError:\n pass\n py_incl_dir = get_python_inc()\n include_dirs.append(py_incl_dir)\n for d in default_include_dirs:\n d = os.path.join(d, os.path.basename(py_incl_dir))\n if d not in include_dirs:\n include_dirs.append(d)\n system_info.__init__(self,\n default_lib_dirs=[],\n default_include_dirs=include_dirs)\n\n def calc_info(self):\n try:\n module = __import__(self.modulename)\n except ImportError:\n return\n info = {}\n macros = [(self.modulename.upper()+'_VERSION',\n '\"\\\\\"%s\\\\\"\"' % (module.__version__))]\n## try:\n## macros.append(\n## (self.modulename.upper()+'_VERSION_HEX',\n## hex(vstr2hex(module.__version__))),\n## )\n## except Exception,msg:\n## print msg\n dict_append(info, define_macros = macros)\n include_dirs = self.get_include_dirs()\n inc_dir = None\n for d in include_dirs:\n if self.combine_paths(d,\n os.path.join(self.modulename,\n 'arrayobject.h')):\n inc_dir = d\n break\n if inc_dir is not None:\n dict_append(info, include_dirs=[inc_dir])\n if info:\n self.set_info(**info)\n return\n\nclass numarray_info(numpy_info):\n section = 'numarray'\n modulename = 'numarray'\n\nclass boost_python_info(system_info):\n section = 'boost_python'\n dir_env_var = 'BOOST'\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['boost*']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n from distutils.sysconfig import get_python_inc\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if 
os.path.isfile(os.path.join(d,'libs','python','src','module.cpp')):\n src_dir = d\n break\n if not src_dir:\n return\n py_incl_dir = get_python_inc()\n srcs_dir = os.path.join(src_dir,'libs','python','src')\n bpl_srcs = glob(os.path.join(srcs_dir,'*.cpp'))\n bpl_srcs += glob(os.path.join(srcs_dir,'*','*.cpp'))\n info = {'libraries':[('boost_python_src',{'include_dirs':[src_dir,py_incl_dir],\n 'sources':bpl_srcs})],\n 'include_dirs':[src_dir],\n }\n if info:\n self.set_info(**info)\n return\n\nclass agg2_info(system_info):\n section = 'agg2'\n dir_env_var = 'AGG2'\n\n def get_paths(self, section, key):\n pre_dirs = system_info.get_paths(self, section, key)\n dirs = []\n for d in pre_dirs:\n dirs.extend([d] + self.combine_paths(d,['agg2*']))\n return [ d for d in dirs if os.path.isdir(d) ]\n\n def calc_info(self):\n src_dirs = self.get_src_dirs()\n src_dir = ''\n for d in src_dirs:\n if os.path.isfile(os.path.join(d,'src','agg_affine_matrix.cpp')):\n src_dir = d\n break\n if not src_dir:\n return\n if sys.platform=='win32':\n agg2_srcs = glob(os.path.join(src_dir,'src','platform','win32','agg_win32_bmp.cpp'))\n else:\n agg2_srcs = glob(os.path.join(src_dir,'src','*.cpp'))\n agg2_srcs += [os.path.join(src_dir,'src','platform','X11','agg_platform_support.cpp')]\n \n info = {'libraries':[('agg2_src',{'sources':agg2_srcs,\n 'include_dirs':[os.path.join(src_dir,'include')],\n })],\n 'include_dirs':[os.path.join(src_dir,'include')],\n }\n if info:\n self.set_info(**info)\n return\n\nclass _pkg_config_info(system_info):\n section = None\n config_env_var = 'PKG_CONFIG'\n default_config_exe = 'pkg-config'\n append_config_exe = ''\n version_macro_name = None\n release_macro_name = None\n version_flag = '--modversion'\n cflags_flag = '--cflags'\n\n def get_config_exe(self):\n if os.environ.has_key(self.config_env_var):\n return os.environ[self.config_env_var]\n return self.default_config_exe\n def get_config_output(self, config_exe, option):\n s,o = exec_command(config_exe+' 
'+self.append_config_exe+' '+option,use_tee=0)\n if not s:\n return o\n\n def calc_info(self):\n config_exe = find_executable(self.get_config_exe())\n if not os.path.isfile(config_exe):\n print 'File not found: %s. Cannot determine %s info.' \\\n % (config_exe, self.section)\n return\n info = {}\n macros = []\n libraries = []\n library_dirs = []\n include_dirs = []\n extra_link_args = []\n extra_compile_args = []\n version = self.get_config_output(config_exe,self.version_flag)\n if version:\n macros.append((self.__class__.__name__.split('.')[-1].upper(),\n '\"\\\\\"%s\\\\\"\"' % (version)))\n if self.version_macro_name:\n macros.append((self.version_macro_name+'_%s' % (version.replace('.','_')),None))\n if self.release_macro_name:\n release = self.get_config_output(config_exe,'--release')\n if release:\n macros.append((self.release_macro_name+'_%s' % (release.replace('.','_')),None))\n opts = self.get_config_output(config_exe,'--libs')\n if opts:\n for opt in opts.split():\n if opt[:2]=='-l':\n libraries.append(opt[2:])\n elif opt[:2]=='-L':\n library_dirs.append(opt[2:])\n else:\n extra_link_args.append(opt)\n opts = self.get_config_output(config_exe,self.cflags_flag)\n if opts:\n for opt in opts.split():\n if opt[:2]=='-I':\n include_dirs.append(opt[2:])\n elif opt[:2]=='-D':\n if '=' in opt:\n n,v = opt[2:].split('=')\n macros.append((n,v))\n else:\n macros.append((opt[2:],None))\n else:\n extra_compile_args.append(opt)\n if macros: dict_append(info, define_macros = macros)\n if libraries: dict_append(info, libraries = libraries)\n if library_dirs: dict_append(info, library_dirs = library_dirs)\n if include_dirs: dict_append(info, include_dirs = include_dirs)\n if extra_link_args: dict_append(info, extra_link_args = extra_link_args)\n if extra_compile_args: dict_append(info, extra_compile_args = extra_compile_args)\n if info:\n self.set_info(**info)\n return\n\nclass wx_info(_pkg_config_info):\n section = 'wx'\n config_env_var = 'WX_CONFIG'\n default_config_exe 
= 'wx-config'\n append_config_exe = ''\n version_macro_name = 'WX_VERSION'\n release_macro_name = 'WX_RELEASE'\n version_flag = '--version'\n cflags_flag = '--cxxflags'\n\nclass gdk_pixbuf_xlib_2_info(_pkg_config_info):\n section = 'gdk_pixbuf_xlib_2'\n append_config_exe = 'gdk-pixbuf-xlib-2.0'\n version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'\n\nclass gdk_pixbuf_2_info(_pkg_config_info):\n section = 'gdk_pixbuf_2'\n append_config_exe = 'gdk-pixbuf-2.0'\n version_macro_name = 'GDK_PIXBUF_VERSION'\n\nclass gdk_x11_2_info(_pkg_config_info):\n section = 'gdk_x11_2'\n append_config_exe = 'gdk-x11-2.0'\n version_macro_name = 'GDK_X11_VERSION'\n\nclass gdk_2_info(_pkg_config_info):\n section = 'gdk_2'\n append_config_exe = 'gdk-2.0'\n version_macro_name = 'GDK_VERSION'\n\nclass gdk_info(_pkg_config_info):\n section = 'gdk'\n append_config_exe = 'gdk'\n version_macro_name = 'GDK_VERSION'\n\nclass gtkp_x11_2_info(_pkg_config_info):\n section = 'gtkp_x11_2'\n append_config_exe = 'gtk+-x11-2.0'\n version_macro_name = 'GTK_X11_VERSION'\n\n\nclass gtkp_2_info(_pkg_config_info):\n section = 'gtkp_2'\n append_config_exe = 'gtk+-2.0'\n version_macro_name = 'GTK_VERSION'\n\nclass xft_info(_pkg_config_info):\n section = 'xft'\n append_config_exe = 'xft'\n version_macro_name = 'XFT_VERSION'\n\nclass freetype2_info(_pkg_config_info):\n section = 'freetype2'\n append_config_exe = 'freetype2'\n version_macro_name = 'FREETYPE2_VERSION'\n\n## def vstr2hex(version):\n## bits = []\n## n = [24,16,8,4,0]\n## r = 0\n## for s in version.split('.'):\n## r |= int(s) << n[0]\n## del n[0]\n## return r\n\n#--------------------------------------------------------------------\n\ndef combine_paths(*args,**kws):\n \"\"\" Return a list of existing paths composed by all combinations of\n items from arguments.\n \"\"\"\n r = []\n for a in args:\n if not a: continue\n if type(a) is types.StringType:\n a = [a]\n r.append(a)\n args = r\n if not args: return []\n if len(args)==1:\n result = reduce(lambda 
a,b:a+b,map(glob,args[0]),[])\n elif len (args)==2:\n result = []\n for a0 in args[0]:\n for a1 in args[1]:\n result.extend(glob(os.path.join(a0,a1)))\n else:\n result = combine_paths(*(combine_paths(args[0],args[1])+args[2:]))\n verbosity = kws.get('verbosity',1)\n if verbosity>1 and result:\n print '(','paths:',','.join(result),')'\n return result\n\nlanguage_map = {'c':0,'c++':1,'f77':2,'f90':3}\ninv_language_map = {0:'c',1:'c++',2:'f77',3:'f90'}\ndef dict_append(d,**kws):\n languages = []\n for k,v in kws.items():\n if k=='language':\n languages.append(v)\n continue\n if d.has_key(k):\n if k in ['library_dirs','include_dirs','define_macros']:\n [d[k].append(vv) for vv in v if vv not in d[k]]\n else:\n d[k].extend(v)\n else:\n d[k] = v\n if languages:\n l = inv_language_map[max([language_map.get(l,0) for l in languages])]\n d['language'] = l\n return\n\ndef show_all():\n import system_info\n import pprint\n match_info = re.compile(r'.*?_info').match\n show_only = []\n for n in sys.argv[1:]:\n if n[-5:] != '_info':\n n = n + '_info'\n show_only.append(n)\n show_all = not show_only\n for n in filter(match_info,dir(system_info)):\n if n in ['system_info','get_info']: continue\n if not show_all:\n if n not in show_only: continue\n del show_only[show_only.index(n)]\n c = getattr(system_info,n)()\n c.verbosity = 2\n r = c.get_info()\n if show_only:\n print 'Info classes not defined:',','.join(show_only)\nif __name__ == \"__main__\":\n show_all()\n", "methods": [ { "name": "get_info", "long_name": "get_info( name , notfound_action = 0 )", "filename": "system_info.py", "nloc": 44, "complexity": 1, "token_count": 198, "parameters": [ "name", "notfound_action" ], "start_line": 143, "end_line": 192, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 50, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , default_lib_dirs = default_lib_dirs , default_include_dirs = default_include_dirs , verbosity = 1 , )", "filename": "system_info.py", 
"nloc": 26, "complexity": 3, "token_count": 210, "parameters": [ "self", "default_lib_dirs", "default_include_dirs", "verbosity" ], "start_line": 272, "end_line": 297, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "calc_libraries_info", "long_name": "calc_libraries_info( self )", "filename": "system_info.py", "nloc": 15, "complexity": 5, "token_count": 78, "parameters": [ "self" ], "start_line": 299, "end_line": 313, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 15, "top_nesting_level": 1 }, { "name": "set_info", "long_name": "set_info( self , ** info )", "filename": "system_info.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "self", "info" ], "start_line": 315, "end_line": 319, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "has_info", "long_name": "has_info( self )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 18, "parameters": [ "self" ], "start_line": 321, "end_line": 322, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_info", "long_name": "get_info( self , notfound_action = 0 )", "filename": "system_info.py", "nloc": 30, "complexity": 15, "token_count": 206, "parameters": [ "self", "notfound_action" ], "start_line": 324, "end_line": 359, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 47, "complexity": 20, "token_count": 405, "parameters": [ "self", "section", "key" ], "start_line": 361, "end_line": 407, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 47, "top_nesting_level": 1 }, { "name": "get_lib_dirs", "long_name": "get_lib_dirs( self , key = 'library_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], 
"start_line": 409, "end_line": 410, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_include_dirs", "long_name": "get_include_dirs( self , key = 'include_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 412, "end_line": 413, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_src_dirs", "long_name": "get_src_dirs( self , key = 'src_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 415, "end_line": 416, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_libs", "long_name": "get_libs( self , key , default )", "filename": "system_info.py", "nloc": 10, "complexity": 7, "token_count": 79, "parameters": [ "self", "key", "default" ], "start_line": 418, "end_line": 427, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self , key = 'libraries' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 18, "parameters": [ "self", "key" ], "start_line": 429, "end_line": 430, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "check_libs", "long_name": "check_libs( self , lib_dir , libs , opt_libs = [ ] )", "filename": "system_info.py", "nloc": 11, "complexity": 5, "token_count": 77, "parameters": [ "self", "lib_dir", "libs", "opt_libs" ], "start_line": 432, "end_line": 444, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "_lib_list", "long_name": "_lib_list( self , lib_dir , libs , ext )", "filename": "system_info.py", "nloc": 9, "complexity": 3, "token_count": 65, "parameters": [ "self", "lib_dir", "libs", "ext" ], "start_line": 446, "end_line": 
454, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "_extract_lib_names", "long_name": "_extract_lib_names( self , libs )", "filename": "system_info.py", "nloc": 3, "complexity": 2, "token_count": 37, "parameters": [ "self", "libs" ], "start_line": 456, "end_line": 458, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "_check_libs", "long_name": "_check_libs( self , lib_dir , libs , opt_libs , ext )", "filename": "system_info.py", "nloc": 10, "complexity": 3, "token_count": 99, "parameters": [ "self", "lib_dir", "libs", "opt_libs", "ext" ], "start_line": 460, "end_line": 469, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "combine_paths", "long_name": "combine_paths( self , * args )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "self", "args" ], "start_line": 471, "end_line": 472, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 11, "parameters": [ "self" ], "start_line": 482, "end_line": 483, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 26, "complexity": 8, "token_count": 150, "parameters": [ "self" ], "start_line": 485, "end_line": 510, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 552, "end_line": 557, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", 
"long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 7, "token_count": 140, "parameters": [ "self" ], "start_line": 559, "end_line": 580, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 7, "complexity": 4, "token_count": 74, "parameters": [ "self", "section", "key" ], "start_line": 595, "end_line": 601, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 78, "complexity": 17, "token_count": 441, "parameters": [ "self" ], "start_line": 603, "end_line": 683, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 81, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 21, "complexity": 6, "token_count": 138, "parameters": [ "self" ], "start_line": 688, "end_line": 710, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 12, "complexity": 3, "token_count": 68, "parameters": [ "self" ], "start_line": 733, "end_line": 745, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 68, "parameters": [ "self", "section", "key" ], "start_line": 752, "end_line": 757, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 81, "complexity": 10, "token_count": 232, "parameters": [ "self" ], "start_line": 759, "end_line": 843, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 85, "top_nesting_level": 1 }, { "name": "get_atlas_version.atlas_version_c", "long_name": "get_atlas_version.atlas_version_c( extension , build_dir , magic = magic )", "filename": "system_info.py", "nloc": 10, "complexity": 3, "token_count": 74, "parameters": [ "extension", "build_dir", "magic" ], "start_line": 867, "end_line": 876, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_atlas_version", "long_name": "get_atlas_version( ** config )", "filename": "system_info.py", "nloc": 45, "complexity": 9, "token_count": 289, "parameters": [ "config" ], "start_line": 862, "end_line": 916, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 55, "top_nesting_level": 0 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 68, "complexity": 19, "token_count": 434, "parameters": [ "self" ], "start_line": 921, "end_line": 996, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 76, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 49, "complexity": 13, "token_count": 316, "parameters": [ "self" ], "start_line": 1001, "end_line": 1053, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 53, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 12, "complexity": 3, "token_count": 68, "parameters": [ "self" ], "start_line": 1062, "end_line": 1074, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1082, "end_line": 1087, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( 
self )", "filename": "system_info.py", "nloc": 34, "complexity": 5, "token_count": 106, "parameters": [ "self" ], "start_line": 1089, "end_line": 1124, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 4, "complexity": 1, "token_count": 19, "parameters": [ "self" ], "start_line": 1130, "end_line": 1133, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 20, "complexity": 7, "token_count": 114, "parameters": [ "self" ], "start_line": 1135, "end_line": 1154, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 139, "parameters": [ "self" ], "start_line": 1161, "end_line": 1182, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 23, "complexity": 6, "token_count": 133, "parameters": [ "self" ], "start_line": 1184, "end_line": 1213, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 30, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1223, "end_line": 1228, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 21, "complexity": 5, "token_count": 156, "parameters": [ "self" ], "start_line": 1230, "end_line": 1250, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 1 }, 
{ "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1256, "end_line": 1261, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 177, "parameters": [ "self" ], "start_line": 1263, "end_line": 1285, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "get_config_exe", "long_name": "get_config_exe( self )", "filename": "system_info.py", "nloc": 4, "complexity": 2, "token_count": 30, "parameters": [ "self" ], "start_line": 1297, "end_line": 1300, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "get_config_output", "long_name": "get_config_output( self , config_exe , option )", "filename": "system_info.py", "nloc": 4, "complexity": 2, "token_count": 37, "parameters": [ "self", "config_exe", "option" ], "start_line": 1301, "end_line": 1304, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 54, "complexity": 22, "token_count": 435, "parameters": [ "self" ], "start_line": 1306, "end_line": 1359, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 54, "top_nesting_level": 1 }, { "name": "combine_paths", "long_name": "combine_paths( * args , ** kws )", "filename": "system_info.py", "nloc": 22, "complexity": 11, "token_count": 195, "parameters": [ "args", "kws" ], "start_line": 1428, "end_line": 1452, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 0 }, { "name": "dict_append", "long_name": "dict_append( d , ** kws )", "filename": "system_info.py", "nloc": 17, "complexity": 9, "token_count": 
128, "parameters": [ "d", "kws" ], "start_line": 1456, "end_line": 1472, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "show_all", "long_name": "show_all( )", "filename": "system_info.py", "nloc": 20, "complexity": 8, "token_count": 137, "parameters": [], "start_line": 1474, "end_line": 1493, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 } ], "methods_before": [ { "name": "get_info", "long_name": "get_info( name , notfound_action = 0 )", "filename": "system_info.py", "nloc": 43, "complexity": 1, "token_count": 194, "parameters": [ "name", "notfound_action" ], "start_line": 143, "end_line": 191, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 49, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , default_lib_dirs = default_lib_dirs , default_include_dirs = default_include_dirs , verbosity = 1 , )", "filename": "system_info.py", "nloc": 26, "complexity": 3, "token_count": 210, "parameters": [ "self", "default_lib_dirs", "default_include_dirs", "verbosity" ], "start_line": 271, "end_line": 296, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "calc_libraries_info", "long_name": "calc_libraries_info( self )", "filename": "system_info.py", "nloc": 15, "complexity": 5, "token_count": 78, "parameters": [ "self" ], "start_line": 298, "end_line": 312, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 15, "top_nesting_level": 1 }, { "name": "set_info", "long_name": "set_info( self , ** info )", "filename": "system_info.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "self", "info" ], "start_line": 314, "end_line": 318, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "has_info", "long_name": "has_info( self )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 18, "parameters": [ "self" ], 
"start_line": 320, "end_line": 321, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_info", "long_name": "get_info( self , notfound_action = 0 )", "filename": "system_info.py", "nloc": 30, "complexity": 15, "token_count": 206, "parameters": [ "self", "notfound_action" ], "start_line": 323, "end_line": 358, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 47, "complexity": 20, "token_count": 405, "parameters": [ "self", "section", "key" ], "start_line": 360, "end_line": 406, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 47, "top_nesting_level": 1 }, { "name": "get_lib_dirs", "long_name": "get_lib_dirs( self , key = 'library_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 408, "end_line": 409, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_include_dirs", "long_name": "get_include_dirs( self , key = 'include_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 411, "end_line": 412, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_src_dirs", "long_name": "get_src_dirs( self , key = 'src_dirs' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "self", "key" ], "start_line": 414, "end_line": 415, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "get_libs", "long_name": "get_libs( self , key , default )", "filename": "system_info.py", "nloc": 10, "complexity": 7, "token_count": 79, "parameters": [ "self", "key", "default" ], "start_line": 417, "end_line": 426, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_libraries", "long_name": "get_libraries( self , key = 'libraries' )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 18, "parameters": [ "self", "key" ], "start_line": 428, "end_line": 429, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "check_libs", "long_name": "check_libs( self , lib_dir , libs , opt_libs = [ ] )", "filename": "system_info.py", "nloc": 11, "complexity": 5, "token_count": 77, "parameters": [ "self", "lib_dir", "libs", "opt_libs" ], "start_line": 431, "end_line": 443, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "_lib_list", "long_name": "_lib_list( self , lib_dir , libs , ext )", "filename": "system_info.py", "nloc": 9, "complexity": 3, "token_count": 65, "parameters": [ "self", "lib_dir", "libs", "ext" ], "start_line": 445, "end_line": 453, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "_extract_lib_names", "long_name": "_extract_lib_names( self , libs )", "filename": "system_info.py", "nloc": 3, "complexity": 2, "token_count": 37, "parameters": [ "self", "libs" ], "start_line": 455, "end_line": 457, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "_check_libs", "long_name": "_check_libs( self , lib_dir , libs , opt_libs , ext )", "filename": "system_info.py", "nloc": 10, "complexity": 3, "token_count": 99, "parameters": [ "self", "lib_dir", "libs", "opt_libs", "ext" ], "start_line": 459, "end_line": 468, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "combine_paths", "long_name": "combine_paths( self , * args )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "self", "args" ], "start_line": 470, "end_line": 471, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 2, "complexity": 1, "token_count": 11, "parameters": [ "self" ], "start_line": 481, "end_line": 482, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 26, "complexity": 8, "token_count": 150, "parameters": [ "self" ], "start_line": 484, "end_line": 509, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 551, "end_line": 556, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 7, "token_count": 140, "parameters": [ "self" ], "start_line": 558, "end_line": 579, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 7, "complexity": 4, "token_count": 74, "parameters": [ "self", "section", "key" ], "start_line": 594, "end_line": 600, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 78, "complexity": 17, "token_count": 441, "parameters": [ "self" ], "start_line": 602, "end_line": 682, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 81, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 21, "complexity": 6, "token_count": 138, "parameters": [ "self" 
], "start_line": 687, "end_line": 709, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 12, "complexity": 3, "token_count": 68, "parameters": [ "self" ], "start_line": 732, "end_line": 744, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 68, "parameters": [ "self", "section", "key" ], "start_line": 751, "end_line": 756, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 81, "complexity": 10, "token_count": 232, "parameters": [ "self" ], "start_line": 758, "end_line": 842, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 85, "top_nesting_level": 1 }, { "name": "get_atlas_version.atlas_version_c", "long_name": "get_atlas_version.atlas_version_c( extension , build_dir , magic = magic )", "filename": "system_info.py", "nloc": 10, "complexity": 3, "token_count": 74, "parameters": [ "extension", "build_dir", "magic" ], "start_line": 866, "end_line": 875, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 1 }, { "name": "get_atlas_version", "long_name": "get_atlas_version( ** config )", "filename": "system_info.py", "nloc": 45, "complexity": 9, "token_count": 289, "parameters": [ "config" ], "start_line": 861, "end_line": 915, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 55, "top_nesting_level": 0 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 68, "complexity": 19, "token_count": 434, "parameters": [ "self" ], "start_line": 920, "end_line": 995, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 76, 
"top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 49, "complexity": 13, "token_count": 316, "parameters": [ "self" ], "start_line": 1000, "end_line": 1052, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 53, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 12, "complexity": 3, "token_count": 68, "parameters": [ "self" ], "start_line": 1061, "end_line": 1073, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1081, "end_line": 1086, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 34, "complexity": 5, "token_count": 106, "parameters": [ "self" ], "start_line": 1088, "end_line": 1123, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 36, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 4, "complexity": 1, "token_count": 19, "parameters": [ "self" ], "start_line": 1129, "end_line": 1132, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 20, "complexity": 7, "token_count": 114, "parameters": [ "self" ], "start_line": 1134, "end_line": 1153, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 1 }, { "name": "__init__", "long_name": "__init__( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 139, "parameters": [ "self" ], "start_line": 1160, "end_line": 1181, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 22, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 121, "parameters": [ "self" ], "start_line": 1183, "end_line": 1211, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 29, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1221, "end_line": 1226, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 21, "complexity": 5, "token_count": 156, "parameters": [ "self" ], "start_line": 1228, "end_line": 1248, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 1 }, { "name": "get_paths", "long_name": "get_paths( self , section , key )", "filename": "system_info.py", "nloc": 6, "complexity": 4, "token_count": 66, "parameters": [ "self", "section", "key" ], "start_line": 1254, "end_line": 1259, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 22, "complexity": 6, "token_count": 177, "parameters": [ "self" ], "start_line": 1261, "end_line": 1283, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 23, "top_nesting_level": 1 }, { "name": "get_config_exe", "long_name": "get_config_exe( self )", "filename": "system_info.py", "nloc": 4, "complexity": 2, "token_count": 30, "parameters": [ "self" ], "start_line": 1295, "end_line": 1298, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "get_config_output", "long_name": "get_config_output( self , config_exe , option )", "filename": 
"system_info.py", "nloc": 4, "complexity": 2, "token_count": 37, "parameters": [ "self", "config_exe", "option" ], "start_line": 1299, "end_line": 1302, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 54, "complexity": 22, "token_count": 435, "parameters": [ "self" ], "start_line": 1304, "end_line": 1357, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 54, "top_nesting_level": 1 }, { "name": "combine_paths", "long_name": "combine_paths( * args , ** kws )", "filename": "system_info.py", "nloc": 22, "complexity": 11, "token_count": 195, "parameters": [ "args", "kws" ], "start_line": 1426, "end_line": 1450, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 0 }, { "name": "dict_append", "long_name": "dict_append( d , ** kws )", "filename": "system_info.py", "nloc": 17, "complexity": 9, "token_count": 128, "parameters": [ "d", "kws" ], "start_line": 1454, "end_line": 1470, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "show_all", "long_name": "show_all( )", "filename": "system_info.py", "nloc": 20, "complexity": 8, "token_count": 137, "parameters": [], "start_line": 1472, "end_line": 1491, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "calc_info", "long_name": "calc_info( self )", "filename": "system_info.py", "nloc": 23, "complexity": 6, "token_count": 133, "parameters": [ "self" ], "start_line": 1184, "end_line": 1213, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 30, "top_nesting_level": 1 }, { "name": "get_info", "long_name": "get_info( name , notfound_action = 0 )", "filename": "system_info.py", "nloc": 44, "complexity": 1, "token_count": 198, "parameters": [ "name", "notfound_action" ], "start_line": 143, "end_line": 192, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 50, "top_nesting_level": 0 } ], "nloc": 1322, "complexity": 285, "token_count": 7322, "diff_parsed": { "added": [ " 'numeric':numpy_info, # alias to numpy, for build_ext --backends support", " '\"\\\\\"%s\\\\\"\"' % (module.__version__)),", " (self.modulename.upper(),None)]" ], "deleted": [ " '\"\\\\\"%s\\\\\"\"' % (module.__version__))]" ] } } ] }, { "hash": "294234cfe5fb36ccbfca47da85ad4c77959fdaa8", "msg": "Update for build_ext --backends.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-24T09:24:34+00:00", "author_timezone": 0, "committer_date": "2005-03-24T09:24:34+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "f8ce14cdaa75a24670a531722a9cfdd425eaea55" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 31, "insertions": 5, "lines": 36, "files": 1, "dmm_unit_size": 0.782608695652174, "dmm_unit_complexity": 0.782608695652174, "dmm_unit_interfacing": 0.0, "modified_files": [ { "old_path": "scipy_base/setup_scipy_base.py", "new_path": "scipy_base/setup_scipy_base.py", "filename": "setup_scipy_base.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -15,8 +15,6 @@ def configuration(parent_package='',parent_path=None):\n local_path = get_path(__name__,parent_path)\n config = default_config_dict(package,parent_package)\n \n- numpy_info = get_info('numpy',notfound_action=2)\n-\n # extra_compile_args -- trying to find something that is binary compatible\n # with msvc for returning Py_complex from functions\n extra_compile_args=[]\n@@ -51,43 +49,19 @@ def configuration(parent_package='',parent_path=None):\n 'libraries': libraries,\n 'extra_compile_args': extra_compile_args,\n 'depends': umath_c_sources}\n- dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n 
\n+ # _compiled_base module\n \n- # _nc_compiled_base and _na_compiled_base modules\n-\n- _compiled_base_c = os.path.join(local_path,'_compiled_base.c')\n- def compiled_base_c(ext,src_dir):\n- source = os.path.join(src_dir,ext.name.split('.')[-1] + '.c')\n- if newer(_compiled_base_c,source):\n- copy_file(_compiled_base_c,source)\n- return [source]\n-\n+ sources = ['_compiled_base.c']\n+ sources = [os.path.join(local_path,x) for x in sources]\n ext_args = {}\n dict_append(ext_args,\n- name=dot_join(package,'_nc_compiled_base'),\n- sources = [compiled_base_c],\n- depends = [_compiled_base_c],\n- define_macros = [('NUMERIC',None)],\n- include_dirs = [local_path]\n+ name=dot_join(package,'_na_compiled_base'),\n+ sources = sources,\n )\n- dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n \n- numarray_info = get_info('numarray')\n- if numarray_info:\n- ext_args = {}\n- dict_append(ext_args,\n- name=dot_join(package,'_na_compiled_base'),\n- sources = [compiled_base_c],\n- depends = [_compiled_base_c],\n- define_macros = [('NUMARRAY',None)],\n- include_dirs = [local_path]\n- )\n- dict_append(ext_args,**numarray_info)\n- config['ext_modules'].append(Extension(**ext_args))\n-\n \n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n", "added_lines": 5, "deleted_lines": 31, "source_code": "#!/usr/bin/env python\nimport os, sys\nfrom glob import glob\n \ndef configuration(parent_package='',parent_path=None):\n from scipy_distutils.system_info import get_info, dict_append\n from scipy_distutils.core import Extension\n from scipy_distutils.misc_util import get_path,default_config_dict,dot_join\n from scipy_distutils.misc_util import get_path,default_config_dict,\\\n dot_join,SourceGenerator\n from distutils.dep_util import newer\n from distutils.file_util import copy_file\n\n package = 'scipy_base'\n local_path = get_path(__name__,parent_path)\n config = default_config_dict(package,parent_package)\n\n # 
extra_compile_args -- trying to find something that is binary compatible\n # with msvc for returning Py_complex from functions\n extra_compile_args=[]\n \n # fastumath module\n # scipy_base.fastumath module\n umath_c_sources = ['fastumathmodule.c',\n 'fastumath_unsigned.inc',\n 'fastumath_nounsigned.inc',\n '_scipy_mapping.c',\n '_scipy_number.c']\n umath_c_sources = [os.path.join(local_path,x) for x in umath_c_sources]\n umath_c = os.path.join(local_path,'fastumathmodule.c')\n sources = [umath_c, os.path.join(local_path,'isnan.c')]\n define_macros = []\n undef_macros = []\n libraries = []\n if sys.byteorder == \"little\":\n define_macros.append(('USE_MCONF_LITE_LE',None))\n else:\n define_macros.append(('USE_MCONF_LITE_BE',None))\n if sys.platform in ['win32']:\n undef_macros.append('HAVE_INVERSE_HYPERBOLIC')\n else:\n libraries.append('m')\n define_macros.append(('HAVE_INVERSE_HYPERBOLIC',None))\n\n ext_args = {'name':dot_join(package,'fastumath'),\n 'sources':sources,\n 'define_macros': define_macros,\n 'undef_macros': undef_macros,\n 'libraries': libraries,\n 'extra_compile_args': extra_compile_args,\n 'depends': umath_c_sources}\n config['ext_modules'].append(Extension(**ext_args))\n\n # _compiled_base module\n\n sources = ['_compiled_base.c']\n sources = [os.path.join(local_path,x) for x in sources]\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_na_compiled_base'),\n sources = sources,\n )\n config['ext_modules'].append(Extension(**ext_args))\n\n\n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n x11 = get_info('x11')\n if x11:\n x11['define_macros'] = [('HAVE_X11',None)]\n ext = Extension(dot_join(package,'display_test'), sources, **x11)\n config['ext_modules'].append(ext)\n\n return config\n\nif __name__ == '__main__':\n from scipy_base_version import scipy_base_version\n print 'scipy_base Version',scipy_base_version\n from scipy_distutils.core import setup\n\n setup(version = scipy_base_version,\n 
maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy base module\",\n url = \"http://www.scipy.org\",\n license = \"SciPy License (BSD Style)\",\n **configuration(parent_path='')\n )\n", "source_code_before": "#!/usr/bin/env python\nimport os, sys\nfrom glob import glob\n \ndef configuration(parent_package='',parent_path=None):\n from scipy_distutils.system_info import get_info, dict_append\n from scipy_distutils.core import Extension\n from scipy_distutils.misc_util import get_path,default_config_dict,dot_join\n from scipy_distutils.misc_util import get_path,default_config_dict,\\\n dot_join,SourceGenerator\n from distutils.dep_util import newer\n from distutils.file_util import copy_file\n\n package = 'scipy_base'\n local_path = get_path(__name__,parent_path)\n config = default_config_dict(package,parent_package)\n\n numpy_info = get_info('numpy',notfound_action=2)\n\n # extra_compile_args -- trying to find something that is binary compatible\n # with msvc for returning Py_complex from functions\n extra_compile_args=[]\n \n # fastumath module\n # scipy_base.fastumath module\n umath_c_sources = ['fastumathmodule.c',\n 'fastumath_unsigned.inc',\n 'fastumath_nounsigned.inc',\n '_scipy_mapping.c',\n '_scipy_number.c']\n umath_c_sources = [os.path.join(local_path,x) for x in umath_c_sources]\n umath_c = os.path.join(local_path,'fastumathmodule.c')\n sources = [umath_c, os.path.join(local_path,'isnan.c')]\n define_macros = []\n undef_macros = []\n libraries = []\n if sys.byteorder == \"little\":\n define_macros.append(('USE_MCONF_LITE_LE',None))\n else:\n define_macros.append(('USE_MCONF_LITE_BE',None))\n if sys.platform in ['win32']:\n undef_macros.append('HAVE_INVERSE_HYPERBOLIC')\n else:\n libraries.append('m')\n define_macros.append(('HAVE_INVERSE_HYPERBOLIC',None))\n\n ext_args = {'name':dot_join(package,'fastumath'),\n 'sources':sources,\n 'define_macros': define_macros,\n 'undef_macros': undef_macros,\n 
'libraries': libraries,\n 'extra_compile_args': extra_compile_args,\n 'depends': umath_c_sources}\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n\n # _nc_compiled_base and _na_compiled_base modules\n\n _compiled_base_c = os.path.join(local_path,'_compiled_base.c')\n def compiled_base_c(ext,src_dir):\n source = os.path.join(src_dir,ext.name.split('.')[-1] + '.c')\n if newer(_compiled_base_c,source):\n copy_file(_compiled_base_c,source)\n return [source]\n\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_nc_compiled_base'),\n sources = [compiled_base_c],\n depends = [_compiled_base_c],\n define_macros = [('NUMERIC',None)],\n include_dirs = [local_path]\n )\n dict_append(ext_args,**numpy_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n numarray_info = get_info('numarray')\n if numarray_info:\n ext_args = {}\n dict_append(ext_args,\n name=dot_join(package,'_na_compiled_base'),\n sources = [compiled_base_c],\n depends = [_compiled_base_c],\n define_macros = [('NUMARRAY',None)],\n include_dirs = [local_path]\n )\n dict_append(ext_args,**numarray_info)\n config['ext_modules'].append(Extension(**ext_args))\n\n\n # display_test module\n sources = [os.path.join(local_path,'src','display_test.c')]\n x11 = get_info('x11')\n if x11:\n x11['define_macros'] = [('HAVE_X11',None)]\n ext = Extension(dot_join(package,'display_test'), sources, **x11)\n config['ext_modules'].append(ext)\n\n return config\n\nif __name__ == '__main__':\n from scipy_base_version import scipy_base_version\n print 'scipy_base Version',scipy_base_version\n from scipy_distutils.core import setup\n\n setup(version = scipy_base_version,\n maintainer = \"SciPy Developers\",\n maintainer_email = \"scipy-dev@scipy.org\",\n description = \"SciPy base module\",\n url = \"http://www.scipy.org\",\n license = \"SciPy License (BSD Style)\",\n **configuration(parent_path='')\n )\n", "methods": [ { "name": "configuration", "long_name": 
"configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 55, "complexity": 6, "token_count": 387, "parameters": [ "parent_package", "parent_path" ], "start_line": 5, "end_line": 74, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 70, "top_nesting_level": 0 } ], "methods_before": [ { "name": "configuration.compiled_base_c", "long_name": "configuration.compiled_base_c( ext , src_dir )", "filename": "setup_scipy_base.py", "nloc": 5, "complexity": 2, "token_count": 50, "parameters": [ "ext", "src_dir" ], "start_line": 61, "end_line": 65, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 73, "complexity": 6, "token_count": 499, "parameters": [ "parent_package", "parent_path" ], "start_line": 5, "end_line": 100, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 96, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "configuration", "long_name": "configuration( parent_package = '' , parent_path = None )", "filename": "setup_scipy_base.py", "nloc": 55, "complexity": 6, "token_count": 387, "parameters": [ "parent_package", "parent_path" ], "start_line": 5, "end_line": 74, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 70, "top_nesting_level": 0 }, { "name": "configuration.compiled_base_c", "long_name": "configuration.compiled_base_c( ext , src_dir )", "filename": "setup_scipy_base.py", "nloc": 5, "complexity": 2, "token_count": 50, "parameters": [ "ext", "src_dir" ], "start_line": 61, "end_line": 65, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 } ], "nloc": 69, "complexity": 6, "token_count": 449, "diff_parsed": { "added": [ " # _compiled_base module", " sources = ['_compiled_base.c']", " sources = [os.path.join(local_path,x) for x in sources]", " 
name=dot_join(package,'_na_compiled_base'),", " sources = sources," ], "deleted": [ " numpy_info = get_info('numpy',notfound_action=2)", "", " dict_append(ext_args,**numpy_info)", " # _nc_compiled_base and _na_compiled_base modules", "", " _compiled_base_c = os.path.join(local_path,'_compiled_base.c')", " def compiled_base_c(ext,src_dir):", " source = os.path.join(src_dir,ext.name.split('.')[-1] + '.c')", " if newer(_compiled_base_c,source):", " copy_file(_compiled_base_c,source)", " return [source]", "", " name=dot_join(package,'_nc_compiled_base'),", " sources = [compiled_base_c],", " depends = [_compiled_base_c],", " define_macros = [('NUMERIC',None)],", " include_dirs = [local_path]", " dict_append(ext_args,**numpy_info)", " numarray_info = get_info('numarray')", " if numarray_info:", " ext_args = {}", " dict_append(ext_args,", " name=dot_join(package,'_na_compiled_base'),", " sources = [compiled_base_c],", " depends = [_compiled_base_c],", " define_macros = [('NUMARRAY',None)],", " include_dirs = [local_path]", " )", " dict_append(ext_args,**numarray_info)", " config['ext_modules'].append(Extension(**ext_args))", "" ] } } ] }, { "hash": "177e7494f761d3f242f92adc008bf2e7172bcf2f", "msg": "Fixed _backend_compiled_base names to _compiled_base.", "author": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "committer": { "name": "Pearu Peterson", "email": "pearu.peterson@gmail.com" }, "author_date": "2005-03-24T09:38:31+00:00", "author_timezone": 0, "committer_date": "2005-03-24T09:38:31+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "294234cfe5fb36ccbfca47da85ad4c77959fdaa8" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 19, "insertions": 25, "lines": 44, "files": 4, "dmm_unit_size": 1.0, "dmm_unit_complexity": 1.0, "dmm_unit_interfacing": 1.0, "modified_files": [ { "old_path": "scipy_base/_compiled_base.c", "new_path": 
"scipy_base/_compiled_base.c", "filename": "_compiled_base.c", "extension": "c", "change_type": "MODIFY", "diff": "@@ -712,19 +712,11 @@ static struct PyMethodDef methods[] = {\n \n /* Initialization function for the module (*must* be called initArray) */\n \n-#if defined(NUMERIC)\n-DL_EXPORT(void) init_nc_compiled_base(void) {\n-#else\n-DL_EXPORT(void) init_na_compiled_base(void) {\n-#endif\n+DL_EXPORT(void) init_compiled_base(void) {\n PyObject *m, *d, *s;\n \n /* Create the module and add the functions */\n-#if defined(NUMERIC)\n- m = Py_InitModule(\"_nc_compiled_base\", methods); \n-#else\n- m = Py_InitModule(\"_na_compiled_base\", methods); \n-#endif\n+ m = Py_InitModule(\"_compiled_base\", methods); \n \n /* Import the array and ufunc objects */\n import_array();\n", "added_lines": 2, "deleted_lines": 10, "source_code": "#include \"Python.h\"\n#include \"numerix.h\"\n\n/* Copy of PyArray_Converter... */\nstatic int \n_Converter(PyObject *object, PyObject **address) {\n\tif (PyArray_Check(object)) {\n\t\t*address = object;\n\t\treturn 1;\n\t}\n\telse {\n\t\tPyErr_SetString(\n\t\t\tPyExc_TypeError,\n\t\t\t\"expected Array object in one of the arguments\");\n\t\treturn 0;\n\t}\n}\n\nstatic char doc_base_unique[] = \"Return the unique elements of a 1-D sequence.\";\nstatic PyObject *base_unique(PyObject *self, PyObject *args, PyObject *kwdict)\n{\n /* Returns a 1-D array containing the unique elements of a 1-D sequence.\n */\n\n void *new_mem=NULL;\n PyArrayObject *ainput=NULL, *aoutput=NULL;\n int asize, abytes, new;\n int copied=0, nd;\n int instride=0, elsize, k, j, dims[1];\n char *ip, *op; /* Current memory buffer */\n char *op2;\n \n static char *kwlist[] = {\"input\", NULL};\n\n if (!PyArg_ParseTupleAndKeywords(\n\t args, kwdict, \"O&\", kwlist, _Converter, &ainput)) \n\t return NULL;\n \n if (ainput->nd > 1) {\n PyErr_SetString(PyExc_ValueError, \"Input array must be < 1 dimensional\");\n return NULL;\n }\n asize = PyArray_SIZE(ainput);\n elsize = 
ainput->descr->elsize;\n abytes = asize * elsize;\n nd = ainput->nd;\n if (nd > 0) {\n instride = ainput->strides[0];\n }\n\n new_mem = (void *)PyMem_Malloc((size_t) abytes);\n if (new_mem == NULL) {\n return PyErr_NoMemory();\n }\n \n ip = ainput->data;\n op = new_mem;\n for (k=0; k < asize; k++,ip+=instride) {\n new = 1; /* Assume it is new */\n op2 = new_mem;\n for (j=0; j < copied; j++,op2+=elsize) {\n if (memcmp(op2,ip,elsize) == 0) { /* Is a match found? */\n new = 0;\n break;\n }\n }\n /* No match found, copy this one over */\n if (new) {\n memcpy(op,ip,elsize);\n copied += 1;\n op += elsize; /* Get ready to put next match */\n }\n }\n\n dims[0] = copied;\n /* Make output array */\n if ((aoutput = (PyArrayObject *)PyArray_FromDims(nd, \n dims, ainput->descr->type_num))==NULL) goto fail;\n\n memcpy(aoutput->data,new_mem,elsize*copied);\n /* Reallocate memory to new-size */\n PyMem_Free(new_mem);\n return PyArray_Return(aoutput); \n \n fail:\n if (new_mem != NULL) PyMem_Free(new_mem);\n Py_XDECREF(aoutput);\n return NULL;\n}\n\n\nstatic char doc_base_insert[] = \"Insert vals sequenctially into equivalent 1-d positions indicated by mask.\";\n\nstatic PyObject *base_insert(PyObject *self, PyObject *args, PyObject *kwdict)\n{\n /* Returns input array with values inserted sequentially into places \n indicated by the mask\n */\n PyObject *mask=NULL, *vals=NULL;\n PyArrayObject *ainput=NULL, *amask=NULL, *avals=NULL, *avalscast=NULL, *tmp=NULL;\n int numvals, totmask, sameshape;\n char *input_data, *mptr, *vptr, *zero;\n int melsize, delsize, copied, nd;\n int *instrides, *inshape;\n int mindx, rem_indx, indx, i, k, objarray;\n \n static char *kwlist[] = {\"input\",\"mask\",\"vals\",NULL};\n\n if (!PyArg_ParseTupleAndKeywords(\n\t args, kwdict, \"O&OO\", kwlist, _Converter, &ainput, &mask, &vals))\n return NULL;\n\n /* Fixed problem with OBJECT ARRAYS\n if (ainput->descr->type_num == PyArray_OBJECT) {\n PyErr_SetString(PyExc_ValueError, \"Not currently supported for 
Object arrays.\");\n return NULL;\n }\n */\n\n amask = (PyArrayObject *) PyArray_ContiguousFromObject(\n\t mask, PyArray_NOTYPE, 0, 0);\n if (amask == NULL) return NULL;\n /* Cast an object array */\n if (amask->descr->type_num == PyArray_OBJECT) {\n tmp = (PyArrayObject *)PyArray_Cast(amask, PyArray_LONG);\n if (tmp == NULL) goto fail;\n Py_DECREF(amask);\n amask = tmp;\n }\n\n sameshape = 1;\n if (amask->nd == ainput->nd) {\n for (k=0; k < amask->nd; k++) \n if (amask->dimensions[k] != ainput->dimensions[k])\n sameshape = 0;\n }\n else { /* Test to see if amask is 1d */\n if (amask->nd != 1) sameshape = 0;\n else if ((PyArray_SIZE(ainput)) != PyArray_SIZE(amask)) sameshape = 0;\n }\n if (!sameshape) {\n PyErr_SetString(PyExc_ValueError, \"Mask array must be 1D or same shape as input array.\");\n goto fail;\n }\n\n avals = (PyArrayObject *)PyArray_FromObject(vals, PyArray_NOTYPE, 0, 1);\n if (avals == NULL) goto fail;\n avalscast = (PyArrayObject *)PyArray_Cast(avals, ainput->descr->type_num);\n if (avalscast == NULL) goto fail;\n Py_DECREF(avals);\n\n numvals = PyArray_SIZE(avalscast);\n nd = ainput->nd;\n input_data = ainput->data;\n mptr = amask->data;\n melsize = amask->descr->elsize;\n vptr = avalscast->data;\n delsize = avalscast->descr->elsize;\n zero = NX_ZERO(amask);\n objarray = (ainput->descr->type_num == PyArray_OBJECT);\n \n /* Handle zero-dimensional case separately */\n if (nd == 0) {\n if (memcmp(mptr,zero,melsize) != 0) {\n /* Copy value element over to input array */\n memcpy(input_data,vptr,delsize);\n if (objarray) Py_INCREF(*((PyObject **)vptr));\n }\n Py_DECREF(amask);\n Py_DECREF(avalscast);\n Py_INCREF(Py_None);\n return Py_None;\n }\n\n /* Walk through mask array, when non-zero is encountered\n copy next value in the vals array to the input array.\n If we get through the value array, repeat it as necessary. 
\n */\n totmask = PyArray_SIZE(amask);\n copied = 0;\n instrides = ainput->strides;\n inshape = ainput->dimensions;\n for (mindx = 0; mindx < totmask; mindx++) { \n if (memcmp(mptr,zero,melsize) != 0) { \n /* compute indx into input array \n */\n rem_indx = mindx;\n indx = 0;\n for(i=nd-1; i > 0; --i) {\n indx += (rem_indx % inshape[i]) * instrides[i];\n rem_indx /= inshape[i];\n }\n indx += rem_indx * instrides[0];\n /* fprintf(stderr, \"mindx = %d, indx=%d\\n\", mindx, indx); */\n /* Copy value element over to input array */\n memcpy(input_data+indx,vptr,delsize);\n if (objarray) Py_INCREF(*((PyObject **)vptr));\n vptr += delsize;\n copied += 1;\n /* If we move past value data. Reset */\n if (copied >= numvals) vptr = avalscast->data;\n }\n mptr += melsize;\n }\n\n Py_DECREF(amask);\n Py_DECREF(avalscast);\n Py_INCREF(Py_None);\n return Py_None;\n \n fail:\n Py_XDECREF(amask);\n Py_XDECREF(avals);\n Py_XDECREF(avalscast);\n return NULL;\n}\n\n\n/* Decrement the reference count of all objects in **arrays. */\nstatic void cleanup_arrays(PyArrayObject **arrays, int number)\n{\n int k;\n for (k=0; k < number; k++)\n Py_XDECREF((PyObject *)arrays[k]);\n return;\n}\n\n/* All rank-0 arrays are converted to rank-1 arrays */\n/* The number of dimensions of each array with rank less than\n the rank of the array with the most dimensions is increased by \n prepending with a dimenson length of one so that all arrays have\n the same rank. 
*/\n/* Dimensions are checked and unmatched dimensions triggers an error */\n/* Strides for dimensions whose real length is one is set to zero but the dimension\n length is set to the maximum dimensions for the collection of inputs */\nstatic int setup_input_arrays(PyTupleObject *inputs, PyArrayObject **inputarrays, int nin)\n{\n int i, k;\n int maxrank=1;\n int *maxdims;\n PyObject *inputobj;\n PyArrayObject *ain, *tmparray;\n\n /* Convert nested sequences to arrays or just increase reference count\n if already an array */\n for (i=0; i < nin; i++) {\n ain = NULL;\n inputobj = PyTuple_GET_ITEM(inputs,i);\n ain = (PyArrayObject *)PyArray_FromObject(inputobj,PyArray_ObjectType(inputobj,0),0,0);\n if (NULL == ain) {\n cleanup_arrays(inputarrays,i);\n return -1;\n }\n if (PyArray_SIZE(ain)==0) {\n cleanup_arrays(inputarrays,i);\n PyErr_SetString(PyExc_IndexError,\"arraymap: Input arrays of zero-dimensions not supported.\");\n return -1;\n }\n if (ain->nd > maxrank) maxrank = ain->nd;\n if (ain->nd == 0) { /* turn into 1-d array */\n /* convert to rank-1 array */\n if ((ain->dimensions = (int *)malloc(sizeof(int))) == NULL) {\n PyErr_SetString(PyExc_MemoryError, \"arraymap: can't allocate memory for input arrays\");\n cleanup_arrays(inputarrays,i);\n return -1;\n }\n if ((ain->strides = (int *)malloc(sizeof(int))) == NULL) {\n PyErr_SetString(PyExc_MemoryError, \"arraymap: can't allocate memory for input arrays\");\n cleanup_arrays(inputarrays,i);\n free(ain->dimensions);\n return -1;\n }\n ain->nd = 1;\n ain->dimensions[0] = 1;\n ain->strides[0] = ain->descr->elsize;\n }\n inputarrays[i] = ain;\n }\n\n maxdims = (int*)malloc(2*sizeof(int)*maxrank);\n if (NULL == maxdims) {\n PyErr_SetString(PyExc_MemoryError, \"arraymap: can't allocate memory for input arrays\");\n cleanup_arrays(inputarrays,nin);\n return -1;\n }\n\n\n /* Reshape all arrays so they have the same rank (pre-pend with length 1 dimensions) */\n /* We want to replace the header information without copying 
the data. \n Keeping the reference count correct can be tricky.\n We want to make a new array object with a different header and decrease the \n reference count of the old one without deallocating the data section */\n for (i=0; i < nin; i++) {\n ain = inputarrays[i];\n\n /* Initialize all dimensions to 1 */\n /* Change array shape */\n for (k=0; k < maxrank; k++) \n maxdims[k] = 1; \n for (k=maxrank-ain->nd; k< maxrank; k++) \n maxdims[k] = ain->dimensions[k-maxrank+ain->nd];\n\n tmparray = (PyArrayObject *)PyArray_FromDimsAndData(maxrank,maxdims,ain->descr->type,ain->data);\n if (NULL == tmparray) {\n free(maxdims);\n cleanup_arrays(inputarrays,nin);\n return -1;\n }\n /* tmparray->strides will be zero where new dimensions were added */\n memmove(tmparray->strides+(maxrank-ain->nd),ain->strides,sizeof(int)*ain->nd);\n tmparray->base = (PyObject *)ain; /* When tmparray is deallocated ain will be too */\n inputarrays[i] = tmparray; /* tmparray is new array */\n }\n\n /* Find dimension length for the output arrays (maximum length for each\n dimension) */\n for (k=0; k < maxrank; k++) { \n maxdims[k] = 1;\n for (i=0; i < nin; i++) \n if (inputarrays[i]->dimensions[k] > maxdims[k])\n\tmaxdims[k] = inputarrays[i]->dimensions[k];\n }\n\n\n /* Now set all lengths for input array dimensions to maxdims \n and make strides equal to zero for arrays whose\n real length is 1 for a particular dimension\n */\n\n for (i=0; idimensions[k]) {\n\tain->strides[k] = 0;\n\tain->dimensions[k] = maxdims[k];\n }\n else if (ain->dimensions[k] != maxdims[k]) {\n\tPyErr_SetString(PyExc_ValueError,\"arraymap: Frames are not aligned (mismatched dimensions).\");\n\tcleanup_arrays(inputarrays,nin);\n\tfree(maxdims);\n\treturn -1;\n }\n }\n }\n\n free(maxdims);\n return 0;\n\n}\n\nstatic int type_from_object(PyObject *obj)\n{\n if (PyArray_Check(obj))\n return ((PyArrayObject *)obj)->descr->type_num;\n if (PyComplex_Check(obj)) return PyArray_CDOUBLE;\n if (PyFloat_Check(obj)) return 
PyArray_DOUBLE;\n if (PyInt_Check(obj) || PyLong_Check(obj)) return PyArray_LONG;\n PyErr_SetString(PyExc_ValueError, \"arraymap: Invalid type for output array.\");\n return -1;\n}\n\nstatic int type_from_char(char typechar)\n{\n switch(typechar) {\n case 'c': return PyArray_CHAR;\n case 'b': return PyArray_UBYTE;\n case '1': return PyArray_SBYTE;\n case 's': return PyArray_SHORT;\n case 'i': return PyArray_INT;\n#ifdef PyArray_UNSIGNED_TYPES\n case 'w': return PyArray_USHORT;\n case 'u': return PyArray_UINT;\n#endif\n case 'l': return PyArray_LONG;\n case 'f': return PyArray_FLOAT;\n case 'd': return PyArray_DOUBLE;\n case 'F': return PyArray_CFLOAT;\n case 'D': return PyArray_CDOUBLE;\n default:\n PyErr_SetString(PyExc_ValueError, \"arraymap: Invalid type for array\");\n return -1;\n }\n}\n\n/* This sets up the output arrays by calling the function with arguments \n the first element of each input arrays. If otypes is NULL, the\n returned value type is used to establish the type of the output\n arrays, otherwise the characters in otypes determine the\n output types */\nstatic int setup_output_arrays(PyObject *func, PyArrayObject **inarr, int nin, PyArrayObject ***outarr, char *otypes, int numtypes)\n{\n PyObject *arglist, *result;\n PyObject *tmpobject;\n PyArrayObject *tmparr;\n int i, nout;\n int nd, *dimensions, type_num;\n\n nd = inarr[0]->nd;\n dimensions = inarr[0]->dimensions;\n\n if ((numtypes == 0) || (otypes == NULL)) { \n /* Call function to get number of outputs */\n\n /* Build argument list */\n if ((arglist = PyTuple_New(nin)) == NULL) {\n return -1;\n }\n /* Construct input argument by creating a tuple with an element\n from each input array (cast to an appropriate Python Object) */\n for (i=0; i < nin; i++) {\n tmparr = inarr[i];\n /* Get first data point */\n tmpobject = NX_GETITEM(tmparr, tmparr->data);\n if (NULL == tmpobject) {\n\tPy_DECREF(arglist);\n\treturn -1;\n }\n PyTuple_SET_ITEM(arglist, i, tmpobject); /* arg1 owns reference to tmpobj 
now */\n } \n /* Call Python Function */\n if ((result=PyEval_CallObject(func, arglist))==NULL) {\n Py_DECREF(arglist);\n return -1;\n }\n\n Py_DECREF(arglist);\n\n /* If result is a tuple, create output_arrays according \n to output. */\n if (PyTuple_Check(result)) {\n nout = PyTuple_GET_SIZE(result);\n *outarr = (PyArrayObject **)malloc(nout*sizeof(PyArrayObject *));\n if (NULL == *outarr) {\n\tPyErr_SetString(PyExc_MemoryError, \"arraymap: Cannot allocate memory for output arrays.\");\n\tPy_DECREF(result);\n\treturn -1;\n }\n /* Create nout output arrays */\n for (i=0; i < nout; i++) {\n\t/* Determine type */\n\tif ((type_num=type_from_object(PyTuple_GET_ITEM(result, i)))==-1) {\n\t cleanup_arrays(*outarr,i);\n\t Py_DECREF(result);\n\t free(*outarr);\n\t return -1;\n\t}\n\t/* Create output array */\n\t(*outarr)[i] = (PyArrayObject *)PyArray_FromDims(nd,dimensions,type_num);\n\tif (NULL == (*outarr)[i]) {\n\t cleanup_arrays(*outarr,i);\n\t Py_DECREF(result);\n\t free(*outarr);\n\t return -1;\n\t}\n }\n }\n else { /* Only a single output result */\n nout = 1;\n *outarr = (PyArrayObject **)malloc(nout*sizeof(PyArrayObject *));\n if (NULL==*outarr) {\n\tPyErr_SetString(PyExc_MemoryError,\"arraymap: Cannot allocate memory for output arrays.\");\n\tPy_DECREF(result);\n\treturn -1;\n }\n if ((type_num = type_from_object(result))==-1) {\n\tPy_DECREF(result);\n\tfree(*outarr);\n\treturn -1;\n }\n (*outarr)[0] = (PyArrayObject *)PyArray_FromDims(nd,dimensions,type_num);\n if (NULL == (*outarr)[0]) {\n\tPy_DECREF(result);\n\tfree(*outarr);\n\treturn -1;\n }\n }\n Py_DECREF(result);\n }\n\n else { /* Character output types entered */\n nout = numtypes;\n *outarr = (PyArrayObject **)malloc(nout*sizeof(PyArrayObject *));\n if (NULL==*outarr) {\n PyErr_SetString(PyExc_MemoryError,\"arraymap: Cannot allocate memory for output arrays.\");\n return -1;\n }\n /* Create Output arrays */\n for (i=0; i < nout; i++) {\n /* Get type */\n if ((type_num = type_from_char(otypes[i]))==-1) 
{\n\tcleanup_arrays(*outarr,i);\n\tfree(*outarr);\n\treturn -1;\n }\n /* Create array */\n (*outarr)[i] = (PyArrayObject *)PyArray_FromDims(nd,dimensions,type_num);\n if (NULL == (*outarr)[i]) {\n\tcleanup_arrays(*outarr,i);\n\tfree(*outarr);\n\treturn -1;\n }\n } \n } \n return nout;\n}\n\n\n/* Corresponding dimensions are assumed to match, check before calling. */\n/* No rank-0 arrays (make them rank-1 arrays) */\n\n/* This replicates the standard Ufunc broadcasting rule that if the\n dimension length is one, incrementing does not occur for that dimension. \n\n This is currently done by setting the stride in that dimension to\n zero during input array setup.\n\n The purpose of this function is to perform a for loop over arbitrary\n discontiguous N-D arrays, call the Python function for each set of \n corresponding elements and place the results in the output_array.\n*/ \n#define INCREMENT(ret_ind, nd, max_ind) \\\n{ \\\n int k; \\\n k = (nd) - 1; \\\n if (++(ret_ind)[k] >= (max_ind)[k]) { \\\n while (k >= 0 && ((ret_ind)[k] >= (max_ind)[k]-1)) \\\n (ret_ind)[k--] = 0; \\\n if (k >= 0) (ret_ind)[k]++; \\\n else (ret_ind)[0] = (max_ind)[0]; \\\n } \\\n}\n\n#define CALCINDEX(indx, nd_index, strides, ndim) \\\n{ \\\n int i; \\\n \\\n indx = 0; \\\n for (i=0; i < (ndim); i++) \\\n indx += (nd_index)[i]*(strides)[i]; \\\n} \n\nstatic int loop_over_arrays(PyObject *func, PyArrayObject **inarr, int nin, PyArrayObject **outarr, int nout)\n{\n int i, loop_index;\n int *nd_index, indx_in, indx_out;\n PyArrayObject *in, *out, *tmparr;\n PyObject *result, *tmpobj, *arglist;\n\n in = inarr[0]; /* For any shape information needed */\n out = outarr[0];\n /* Allocate the N-D index initalized to zero. 
*/\n nd_index = (int *)calloc(in->nd,sizeof(int));\n if (NULL == nd_index) {\n PyErr_SetString(PyExc_MemoryError,\"arraymap: Cannot allocate memory for arrays.\");\n return -1;\n }\n /* Build argument list */\n if ((arglist = PyTuple_New(nin)) == NULL) {\n free(nd_index);\n return -1;\n }\n\n loop_index = PyArray_Size((PyObject *)in); /* Total number of Python function calls */\n\n while(loop_index--) { \n\t /* Create input argument list with current element from the input\n\t arrays \n\t */\n\t for (i=0; i < nin; i++) {\n\t\t \n\t\t tmparr = inarr[i];\n\t\t /* Find linear index into this input array */\n\t\t CALCINDEX(indx_in,nd_index,tmparr->strides,in->nd);\n\t\t /* Get object at this index */\n\t\t tmpobj = NX_GETITEM(tmparr, (tmparr->data+indx_in));\n\t\t if (NULL == tmpobj) {\n\t\t\t Py_DECREF(arglist);\n\t\t\t free(nd_index);\n\t\t\t return -1;\n\t\t }\n\t\t /* This steals reference of tmpobj */\n\t\t PyTuple_SET_ITEM(arglist, i, tmpobj); \n\t }\n\t \n\t /* Call Python Function for this set of inputs */\n\t if ((result=PyEval_CallObject(func, arglist))==NULL) {\n\t\t Py_DECREF(arglist);\n\t\t free(nd_index);\n\t\t return -1;\n\t } \n\t \n\t /* Find index into (all) output arrays */\n\t CALCINDEX(indx_out,nd_index,out->strides,out->nd);\n\t \n\t /* Copy the results to the output arrays */\n\t if (1==nout) {\n\t\t int rval = NX_SETITEM(\n\t\t\t outarr[0], (outarr[0]->data+indx_out), result);\n\t\t if (rval==-1) {\n\t\t\t free(nd_index);\n\t\t\t Py_DECREF(arglist);\n\t\t\t Py_DECREF(result);\n\t\t\t return -1;\n\t\t }\n\t }\n\t else if (PyTuple_Check(result)) {\n\t\t for (i=0; idata+indx_out), \n\t\t\t\t PyTuple_GET_ITEM(result,i));\n\t\t\t free(nd_index);\n\t\t\t Py_DECREF(arglist);\n\t\t\t Py_DECREF(result);\n\t\t\t return -1;\n\t\t }\n\t \n\t } else { \n\t\t PyErr_SetString(PyExc_ValueError,\"arraymap: Function output of incorrect type.\");\n\t\t free(nd_index);\n\t\t Py_DECREF(arglist);\n\t\t Py_DECREF(result);\n\t\t return -1;\n\t }\n\n\t /* Increment the 
index counter */\n\t INCREMENT(nd_index,in->nd,in->dimensions);\n\t Py_DECREF(result);\n\t \n }\n Py_DECREF(arglist);\n free(nd_index);\n return 0;\n} \n\nstatic PyObject *build_output(PyArrayObject **outarr,int nout)\n{\n int i;\n PyObject *out;\n\n if (1==nout) return PyArray_Return(outarr[0]);\n if ((out=PyTuple_New(nout))==NULL) return NULL;\n for (i=0; ind > 1) {\n PyErr_SetString(PyExc_ValueError, \"Input array must be < 1 dimensional\");\n return NULL;\n }\n asize = PyArray_SIZE(ainput);\n elsize = ainput->descr->elsize;\n abytes = asize * elsize;\n nd = ainput->nd;\n if (nd > 0) {\n instride = ainput->strides[0];\n }\n\n new_mem = (void *)PyMem_Malloc((size_t) abytes);\n if (new_mem == NULL) {\n return PyErr_NoMemory();\n }\n \n ip = ainput->data;\n op = new_mem;\n for (k=0; k < asize; k++,ip+=instride) {\n new = 1; /* Assume it is new */\n op2 = new_mem;\n for (j=0; j < copied; j++,op2+=elsize) {\n if (memcmp(op2,ip,elsize) == 0) { /* Is a match found? */\n new = 0;\n break;\n }\n }\n /* No match found, copy this one over */\n if (new) {\n memcpy(op,ip,elsize);\n copied += 1;\n op += elsize; /* Get ready to put next match */\n }\n }\n\n dims[0] = copied;\n /* Make output array */\n if ((aoutput = (PyArrayObject *)PyArray_FromDims(nd, \n dims, ainput->descr->type_num))==NULL) goto fail;\n\n memcpy(aoutput->data,new_mem,elsize*copied);\n /* Reallocate memory to new-size */\n PyMem_Free(new_mem);\n return PyArray_Return(aoutput); \n \n fail:\n if (new_mem != NULL) PyMem_Free(new_mem);\n Py_XDECREF(aoutput);\n return NULL;\n}\n\n\nstatic char doc_base_insert[] = \"Insert vals sequenctially into equivalent 1-d positions indicated by mask.\";\n\nstatic PyObject *base_insert(PyObject *self, PyObject *args, PyObject *kwdict)\n{\n /* Returns input array with values inserted sequentially into places \n indicated by the mask\n */\n PyObject *mask=NULL, *vals=NULL;\n PyArrayObject *ainput=NULL, *amask=NULL, *avals=NULL, *avalscast=NULL, *tmp=NULL;\n int numvals, 
totmask, sameshape;\n char *input_data, *mptr, *vptr, *zero;\n int melsize, delsize, copied, nd;\n int *instrides, *inshape;\n int mindx, rem_indx, indx, i, k, objarray;\n \n static char *kwlist[] = {\"input\",\"mask\",\"vals\",NULL};\n\n if (!PyArg_ParseTupleAndKeywords(\n\t args, kwdict, \"O&OO\", kwlist, _Converter, &ainput, &mask, &vals))\n return NULL;\n\n /* Fixed problem with OBJECT ARRAYS\n if (ainput->descr->type_num == PyArray_OBJECT) {\n PyErr_SetString(PyExc_ValueError, \"Not currently supported for Object arrays.\");\n return NULL;\n }\n */\n\n amask = (PyArrayObject *) PyArray_ContiguousFromObject(\n\t mask, PyArray_NOTYPE, 0, 0);\n if (amask == NULL) return NULL;\n /* Cast an object array */\n if (amask->descr->type_num == PyArray_OBJECT) {\n tmp = (PyArrayObject *)PyArray_Cast(amask, PyArray_LONG);\n if (tmp == NULL) goto fail;\n Py_DECREF(amask);\n amask = tmp;\n }\n\n sameshape = 1;\n if (amask->nd == ainput->nd) {\n for (k=0; k < amask->nd; k++) \n if (amask->dimensions[k] != ainput->dimensions[k])\n sameshape = 0;\n }\n else { /* Test to see if amask is 1d */\n if (amask->nd != 1) sameshape = 0;\n else if ((PyArray_SIZE(ainput)) != PyArray_SIZE(amask)) sameshape = 0;\n }\n if (!sameshape) {\n PyErr_SetString(PyExc_ValueError, \"Mask array must be 1D or same shape as input array.\");\n goto fail;\n }\n\n avals = (PyArrayObject *)PyArray_FromObject(vals, PyArray_NOTYPE, 0, 1);\n if (avals == NULL) goto fail;\n avalscast = (PyArrayObject *)PyArray_Cast(avals, ainput->descr->type_num);\n if (avalscast == NULL) goto fail;\n Py_DECREF(avals);\n\n numvals = PyArray_SIZE(avalscast);\n nd = ainput->nd;\n input_data = ainput->data;\n mptr = amask->data;\n melsize = amask->descr->elsize;\n vptr = avalscast->data;\n delsize = avalscast->descr->elsize;\n zero = NX_ZERO(amask);\n objarray = (ainput->descr->type_num == PyArray_OBJECT);\n \n /* Handle zero-dimensional case separately */\n if (nd == 0) {\n if (memcmp(mptr,zero,melsize) != 0) {\n /* Copy value 
element over to input array */\n memcpy(input_data,vptr,delsize);\n if (objarray) Py_INCREF(*((PyObject **)vptr));\n }\n Py_DECREF(amask);\n Py_DECREF(avalscast);\n Py_INCREF(Py_None);\n return Py_None;\n }\n\n /* Walk through mask array, when non-zero is encountered\n copy next value in the vals array to the input array.\n If we get through the value array, repeat it as necessary. \n */\n totmask = PyArray_SIZE(amask);\n copied = 0;\n instrides = ainput->strides;\n inshape = ainput->dimensions;\n for (mindx = 0; mindx < totmask; mindx++) { \n if (memcmp(mptr,zero,melsize) != 0) { \n /* compute indx into input array \n */\n rem_indx = mindx;\n indx = 0;\n for(i=nd-1; i > 0; --i) {\n indx += (rem_indx % inshape[i]) * instrides[i];\n rem_indx /= inshape[i];\n }\n indx += rem_indx * instrides[0];\n /* fprintf(stderr, \"mindx = %d, indx=%d\\n\", mindx, indx); */\n /* Copy value element over to input array */\n memcpy(input_data+indx,vptr,delsize);\n if (objarray) Py_INCREF(*((PyObject **)vptr));\n vptr += delsize;\n copied += 1;\n /* If we move past value data. Reset */\n if (copied >= numvals) vptr = avalscast->data;\n }\n mptr += melsize;\n }\n\n Py_DECREF(amask);\n Py_DECREF(avalscast);\n Py_INCREF(Py_None);\n return Py_None;\n \n fail:\n Py_XDECREF(amask);\n Py_XDECREF(avals);\n Py_XDECREF(avalscast);\n return NULL;\n}\n\n\n/* Decrement the reference count of all objects in **arrays. */\nstatic void cleanup_arrays(PyArrayObject **arrays, int number)\n{\n int k;\n for (k=0; k < number; k++)\n Py_XDECREF((PyObject *)arrays[k]);\n return;\n}\n\n/* All rank-0 arrays are converted to rank-1 arrays */\n/* The number of dimensions of each array with rank less than\n the rank of the array with the most dimensions is increased by \n prepending with a dimenson length of one so that all arrays have\n the same rank. 
*/\n/* Dimensions are checked and unmatched dimensions triggers an error */\n/* Strides for dimensions whose real length is one is set to zero but the dimension\n length is set to the maximum dimensions for the collection of inputs */\nstatic int setup_input_arrays(PyTupleObject *inputs, PyArrayObject **inputarrays, int nin)\n{\n int i, k;\n int maxrank=1;\n int *maxdims;\n PyObject *inputobj;\n PyArrayObject *ain, *tmparray;\n\n /* Convert nested sequences to arrays or just increase reference count\n if already an array */\n for (i=0; i < nin; i++) {\n ain = NULL;\n inputobj = PyTuple_GET_ITEM(inputs,i);\n ain = (PyArrayObject *)PyArray_FromObject(inputobj,PyArray_ObjectType(inputobj,0),0,0);\n if (NULL == ain) {\n cleanup_arrays(inputarrays,i);\n return -1;\n }\n if (PyArray_SIZE(ain)==0) {\n cleanup_arrays(inputarrays,i);\n PyErr_SetString(PyExc_IndexError,\"arraymap: Input arrays of zero-dimensions not supported.\");\n return -1;\n }\n if (ain->nd > maxrank) maxrank = ain->nd;\n if (ain->nd == 0) { /* turn into 1-d array */\n /* convert to rank-1 array */\n if ((ain->dimensions = (int *)malloc(sizeof(int))) == NULL) {\n PyErr_SetString(PyExc_MemoryError, \"arraymap: can't allocate memory for input arrays\");\n cleanup_arrays(inputarrays,i);\n return -1;\n }\n if ((ain->strides = (int *)malloc(sizeof(int))) == NULL) {\n PyErr_SetString(PyExc_MemoryError, \"arraymap: can't allocate memory for input arrays\");\n cleanup_arrays(inputarrays,i);\n free(ain->dimensions);\n return -1;\n }\n ain->nd = 1;\n ain->dimensions[0] = 1;\n ain->strides[0] = ain->descr->elsize;\n }\n inputarrays[i] = ain;\n }\n\n maxdims = (int*)malloc(2*sizeof(int)*maxrank);\n if (NULL == maxdims) {\n PyErr_SetString(PyExc_MemoryError, \"arraymap: can't allocate memory for input arrays\");\n cleanup_arrays(inputarrays,nin);\n return -1;\n }\n\n\n /* Reshape all arrays so they have the same rank (pre-pend with length 1 dimensions) */\n /* We want to replace the header information without copying 
the data. \n Keeping the reference count correct can be tricky.\n We want to make a new array object with a different header and decrease the \n reference count of the old one without deallocating the data section */\n for (i=0; i < nin; i++) {\n ain = inputarrays[i];\n\n /* Initialize all dimensions to 1 */\n /* Change array shape */\n for (k=0; k < maxrank; k++) \n maxdims[k] = 1; \n for (k=maxrank-ain->nd; k< maxrank; k++) \n maxdims[k] = ain->dimensions[k-maxrank+ain->nd];\n\n tmparray = (PyArrayObject *)PyArray_FromDimsAndData(maxrank,maxdims,ain->descr->type,ain->data);\n if (NULL == tmparray) {\n free(maxdims);\n cleanup_arrays(inputarrays,nin);\n return -1;\n }\n /* tmparray->strides will be zero where new dimensions were added */\n memmove(tmparray->strides+(maxrank-ain->nd),ain->strides,sizeof(int)*ain->nd);\n tmparray->base = (PyObject *)ain; /* When tmparray is deallocated ain will be too */\n inputarrays[i] = tmparray; /* tmparray is new array */\n }\n\n /* Find dimension length for the output arrays (maximum length for each\n dimension) */\n for (k=0; k < maxrank; k++) { \n maxdims[k] = 1;\n for (i=0; i < nin; i++) \n if (inputarrays[i]->dimensions[k] > maxdims[k])\n\tmaxdims[k] = inputarrays[i]->dimensions[k];\n }\n\n\n /* Now set all lengths for input array dimensions to maxdims \n and make strides equal to zero for arrays whose\n real length is 1 for a particular dimension\n */\n\n for (i=0; idimensions[k]) {\n\tain->strides[k] = 0;\n\tain->dimensions[k] = maxdims[k];\n }\n else if (ain->dimensions[k] != maxdims[k]) {\n\tPyErr_SetString(PyExc_ValueError,\"arraymap: Frames are not aligned (mismatched dimensions).\");\n\tcleanup_arrays(inputarrays,nin);\n\tfree(maxdims);\n\treturn -1;\n }\n }\n }\n\n free(maxdims);\n return 0;\n\n}\n\nstatic int type_from_object(PyObject *obj)\n{\n if (PyArray_Check(obj))\n return ((PyArrayObject *)obj)->descr->type_num;\n if (PyComplex_Check(obj)) return PyArray_CDOUBLE;\n if (PyFloat_Check(obj)) return 
PyArray_DOUBLE;\n if (PyInt_Check(obj) || PyLong_Check(obj)) return PyArray_LONG;\n PyErr_SetString(PyExc_ValueError, \"arraymap: Invalid type for output array.\");\n return -1;\n}\n\nstatic int type_from_char(char typechar)\n{\n switch(typechar) {\n case 'c': return PyArray_CHAR;\n case 'b': return PyArray_UBYTE;\n case '1': return PyArray_SBYTE;\n case 's': return PyArray_SHORT;\n case 'i': return PyArray_INT;\n#ifdef PyArray_UNSIGNED_TYPES\n case 'w': return PyArray_USHORT;\n case 'u': return PyArray_UINT;\n#endif\n case 'l': return PyArray_LONG;\n case 'f': return PyArray_FLOAT;\n case 'd': return PyArray_DOUBLE;\n case 'F': return PyArray_CFLOAT;\n case 'D': return PyArray_CDOUBLE;\n default:\n PyErr_SetString(PyExc_ValueError, \"arraymap: Invalid type for array\");\n return -1;\n }\n}\n\n/* This sets up the output arrays by calling the function with arguments \n the first element of each input arrays. If otypes is NULL, the\n returned value type is used to establish the type of the output\n arrays, otherwise the characters in otypes determine the\n output types */\nstatic int setup_output_arrays(PyObject *func, PyArrayObject **inarr, int nin, PyArrayObject ***outarr, char *otypes, int numtypes)\n{\n PyObject *arglist, *result;\n PyObject *tmpobject;\n PyArrayObject *tmparr;\n int i, nout;\n int nd, *dimensions, type_num;\n\n nd = inarr[0]->nd;\n dimensions = inarr[0]->dimensions;\n\n if ((numtypes == 0) || (otypes == NULL)) { \n /* Call function to get number of outputs */\n\n /* Build argument list */\n if ((arglist = PyTuple_New(nin)) == NULL) {\n return -1;\n }\n /* Construct input argument by creating a tuple with an element\n from each input array (cast to an appropriate Python Object) */\n for (i=0; i < nin; i++) {\n tmparr = inarr[i];\n /* Get first data point */\n tmpobject = NX_GETITEM(tmparr, tmparr->data);\n if (NULL == tmpobject) {\n\tPy_DECREF(arglist);\n\treturn -1;\n }\n PyTuple_SET_ITEM(arglist, i, tmpobject); /* arg1 owns reference to tmpobj 
now */\n } \n /* Call Python Function */\n if ((result=PyEval_CallObject(func, arglist))==NULL) {\n Py_DECREF(arglist);\n return -1;\n }\n\n Py_DECREF(arglist);\n\n /* If result is a tuple, create output_arrays according \n to output. */\n if (PyTuple_Check(result)) {\n nout = PyTuple_GET_SIZE(result);\n *outarr = (PyArrayObject **)malloc(nout*sizeof(PyArrayObject *));\n if (NULL == *outarr) {\n\tPyErr_SetString(PyExc_MemoryError, \"arraymap: Cannot allocate memory for output arrays.\");\n\tPy_DECREF(result);\n\treturn -1;\n }\n /* Create nout output arrays */\n for (i=0; i < nout; i++) {\n\t/* Determine type */\n\tif ((type_num=type_from_object(PyTuple_GET_ITEM(result, i)))==-1) {\n\t cleanup_arrays(*outarr,i);\n\t Py_DECREF(result);\n\t free(*outarr);\n\t return -1;\n\t}\n\t/* Create output array */\n\t(*outarr)[i] = (PyArrayObject *)PyArray_FromDims(nd,dimensions,type_num);\n\tif (NULL == (*outarr)[i]) {\n\t cleanup_arrays(*outarr,i);\n\t Py_DECREF(result);\n\t free(*outarr);\n\t return -1;\n\t}\n }\n }\n else { /* Only a single output result */\n nout = 1;\n *outarr = (PyArrayObject **)malloc(nout*sizeof(PyArrayObject *));\n if (NULL==*outarr) {\n\tPyErr_SetString(PyExc_MemoryError,\"arraymap: Cannot allocate memory for output arrays.\");\n\tPy_DECREF(result);\n\treturn -1;\n }\n if ((type_num = type_from_object(result))==-1) {\n\tPy_DECREF(result);\n\tfree(*outarr);\n\treturn -1;\n }\n (*outarr)[0] = (PyArrayObject *)PyArray_FromDims(nd,dimensions,type_num);\n if (NULL == (*outarr)[0]) {\n\tPy_DECREF(result);\n\tfree(*outarr);\n\treturn -1;\n }\n }\n Py_DECREF(result);\n }\n\n else { /* Character output types entered */\n nout = numtypes;\n *outarr = (PyArrayObject **)malloc(nout*sizeof(PyArrayObject *));\n if (NULL==*outarr) {\n PyErr_SetString(PyExc_MemoryError,\"arraymap: Cannot allocate memory for output arrays.\");\n return -1;\n }\n /* Create Output arrays */\n for (i=0; i < nout; i++) {\n /* Get type */\n if ((type_num = type_from_char(otypes[i]))==-1) 
{\n\tcleanup_arrays(*outarr,i);\n\tfree(*outarr);\n\treturn -1;\n }\n /* Create array */\n (*outarr)[i] = (PyArrayObject *)PyArray_FromDims(nd,dimensions,type_num);\n if (NULL == (*outarr)[i]) {\n\tcleanup_arrays(*outarr,i);\n\tfree(*outarr);\n\treturn -1;\n }\n } \n } \n return nout;\n}\n\n\n/* Corresponding dimensions are assumed to match, check before calling. */\n/* No rank-0 arrays (make them rank-1 arrays) */\n\n/* This replicates the standard Ufunc broadcasting rule that if the\n dimension length is one, incrementing does not occur for that dimension. \n\n This is currently done by setting the stride in that dimension to\n zero during input array setup.\n\n The purpose of this function is to perform a for loop over arbitrary\n discontiguous N-D arrays, call the Python function for each set of \n corresponding elements and place the results in the output_array.\n*/ \n#define INCREMENT(ret_ind, nd, max_ind) \\\n{ \\\n int k; \\\n k = (nd) - 1; \\\n if (++(ret_ind)[k] >= (max_ind)[k]) { \\\n while (k >= 0 && ((ret_ind)[k] >= (max_ind)[k]-1)) \\\n (ret_ind)[k--] = 0; \\\n if (k >= 0) (ret_ind)[k]++; \\\n else (ret_ind)[0] = (max_ind)[0]; \\\n } \\\n}\n\n#define CALCINDEX(indx, nd_index, strides, ndim) \\\n{ \\\n int i; \\\n \\\n indx = 0; \\\n for (i=0; i < (ndim); i++) \\\n indx += (nd_index)[i]*(strides)[i]; \\\n} \n\nstatic int loop_over_arrays(PyObject *func, PyArrayObject **inarr, int nin, PyArrayObject **outarr, int nout)\n{\n int i, loop_index;\n int *nd_index, indx_in, indx_out;\n PyArrayObject *in, *out, *tmparr;\n PyObject *result, *tmpobj, *arglist;\n\n in = inarr[0]; /* For any shape information needed */\n out = outarr[0];\n /* Allocate the N-D index initalized to zero. 
*/\n nd_index = (int *)calloc(in->nd,sizeof(int));\n if (NULL == nd_index) {\n PyErr_SetString(PyExc_MemoryError,\"arraymap: Cannot allocate memory for arrays.\");\n return -1;\n }\n /* Build argument list */\n if ((arglist = PyTuple_New(nin)) == NULL) {\n free(nd_index);\n return -1;\n }\n\n loop_index = PyArray_Size((PyObject *)in); /* Total number of Python function calls */\n\n while(loop_index--) { \n\t /* Create input argument list with current element from the input\n\t arrays \n\t */\n\t for (i=0; i < nin; i++) {\n\t\t \n\t\t tmparr = inarr[i];\n\t\t /* Find linear index into this input array */\n\t\t CALCINDEX(indx_in,nd_index,tmparr->strides,in->nd);\n\t\t /* Get object at this index */\n\t\t tmpobj = NX_GETITEM(tmparr, (tmparr->data+indx_in));\n\t\t if (NULL == tmpobj) {\n\t\t\t Py_DECREF(arglist);\n\t\t\t free(nd_index);\n\t\t\t return -1;\n\t\t }\n\t\t /* This steals reference of tmpobj */\n\t\t PyTuple_SET_ITEM(arglist, i, tmpobj); \n\t }\n\t \n\t /* Call Python Function for this set of inputs */\n\t if ((result=PyEval_CallObject(func, arglist))==NULL) {\n\t\t Py_DECREF(arglist);\n\t\t free(nd_index);\n\t\t return -1;\n\t } \n\t \n\t /* Find index into (all) output arrays */\n\t CALCINDEX(indx_out,nd_index,out->strides,out->nd);\n\t \n\t /* Copy the results to the output arrays */\n\t if (1==nout) {\n\t\t int rval = NX_SETITEM(\n\t\t\t outarr[0], (outarr[0]->data+indx_out), result);\n\t\t if (rval==-1) {\n\t\t\t free(nd_index);\n\t\t\t Py_DECREF(arglist);\n\t\t\t Py_DECREF(result);\n\t\t\t return -1;\n\t\t }\n\t }\n\t else if (PyTuple_Check(result)) {\n\t\t for (i=0; idata+indx_out), \n\t\t\t\t PyTuple_GET_ITEM(result,i));\n\t\t\t free(nd_index);\n\t\t\t Py_DECREF(arglist);\n\t\t\t Py_DECREF(result);\n\t\t\t return -1;\n\t\t }\n\t \n\t } else { \n\t\t PyErr_SetString(PyExc_ValueError,\"arraymap: Function output of incorrect type.\");\n\t\t free(nd_index);\n\t\t Py_DECREF(arglist);\n\t\t Py_DECREF(result);\n\t\t return -1;\n\t }\n\n\t /* Increment the 
index counter */\n\t INCREMENT(nd_index,in->nd,in->dimensions);\n\t Py_DECREF(result);\n\t \n }\n Py_DECREF(arglist);\n free(nd_index);\n return 0;\n} \n\nstatic PyObject *build_output(PyArrayObject **outarr,int nout)\n{\n int i;\n PyObject *out;\n\n if (1==nout) return PyArray_Return(outarr[0]);\n if ((out=PyTuple_New(nout))==NULL) return NULL;\n for (i=0; i= 1.3) wrapped\nobjects from Weave. SWIG-1.3 wraps objects differently from SWIG-1.1.\n\nThe code here is based on wx_spec.py. However, this module is more\nlike a template for any SWIG2 wrapped converter. To wrap any special\ncode that uses SWIG the user simply needs to override the defaults in\nthe swig2_converter class. These special circumstances arise when one\nhas wrapped code that uses C++ namespaces. However, for most\nstraightforward SWIG wrappers this converter should work fine out of\nthe box.\n\nNewer versions of SWIG (>=1.3.22) represent the wrapped object using a\nPyCObject and also a PySwigObject (>=1.3.24). This code supports all\nof these options transparently.\n\nSince SWIG-1.3.x is under intense development there are several issues\nto consider when using the swig2_converter.\n\n 1. For SWIG versions <= 1.3.19, the runtime code was built either\n into the module or into a separate library called libswigpy (or\n something like that). In the latter case, the users Python\n modules were linked to this library and shared type information\n (this was common for large projects with several modules that\n needed to share type information). If you are using multiple\n inheritance and want to be certain that type coercions from a\n derived class to a base class are done correctly, you will need to\n link to the libswigpy library. You will then need to add these to\n the keyword arguments passed along to `weave.inline`:\n\n a. Add a define_macros=[('SWIG_NOINCLUDE', None)]\n\n b. Add the swigpy library to the libraries like so:\n libraries=['swigpy']\n\n c. 
If the libswigpy is in a non-standard location add the path\n to the library_dirs argument as\n `library_dirs=['/usr/local/lib']` or whatever.\n\n OTOH if you do not need to link to libswigpy (this is likely if\n you are not using multiple inheritance), then you do not need the\n above. However you are likely to get an annoying message of the\n form::\n\n WARNING: swig_type_info is NULL.\n\n for each SWIG object you are inlining (during each call). To\n avoid this add a define_macros=[('NO_SWIG_WARN', None)].\n\n 2. Since keeping track of a separate runtime is a pain, for SWIG\n versions >= 1.3.23 the type information was stored inside a\n special module. Thus in these versions there is no need to link\n to this special SWIG runtime library. This module handles these\n cases automatically and nothing special need be done.\n\n Using modules wrapped with different SWIG versions simultaneously.\n Lets say you have library 'A' that is wrapped using SWIG version\n 1.3.20. Then lets say you have a library 'B' wrapped using\n version 1.3.24. Now if you want to use both in weave.inline, we\n have a serious problem. The trouble is that both 'A' and 'B' may\n use different and incompatible runtime layouts. It is impossible\n to get the type conversions right in these cases. Thus it is\n strongly advised that you use one version of SWIG to wrap all of\n the code that you intend to inline using weave. Note that you can\n certainly use SWIG-1.3.23 for everything and do not have to use\n the latest and greatest SWIG to use weave.inline. Just make sure\n that when inlining SWIG wrapped objects that all such objects use\n the same runtime layout. By default, if you are using different\n versions and do need to inline these objects, the latest layout\n will be assumed. This might leave you with holes in your feet,\n but you have been warned. 
You can force the converter to use a\n specific runtime version if you want (see the\n `swig2_converter.__init__` method and its documentation).\n\n\nPrabhu Ramachandran \n\"\"\"\n\nimport sys\nimport common_info\nfrom c_spec import common_base_converter\nimport converters\nimport swigptr2\n\n\n#----------------------------------------------------------------------\n# Commonly used functions for the type query. This is done mainly to\n# avoid code duplication.\n#----------------------------------------------------------------------\nswig2_common_code = \\\n'''\nswig_type_info *\nWeave_SWIG_TypeQuery(const char *name) {\n swig_type_info *ty = SWIG_TypeQuery(name);\n#ifndef NO_SWIG_WARN\n if (ty == NULL) {\n printf(\"WARNING: swig_type_info is NULL.\\\\n\");\n }\n#endif\n return ty;\n}\n'''\n#----------------------------------------------------------------------\n# This code obtains the C++ pointer given a a SWIG2 wrapped C++ object\n# in Python.\n#----------------------------------------------------------------------\n\nswig2_py_to_c_template = \\\n\"\"\"\nclass %(type_name)s_handler\n{\npublic: \n %(c_type)s convert_to_%(type_name)s(PyObject* py_obj, const char* name)\n {\n %(c_type)s c_ptr;\n swig_type_info *ty = Weave_SWIG_TypeQuery(\"%(c_type)s\");\n // work on this error reporting...\n if (SWIG_ConvertPtr(py_obj, (void **) &c_ptr, ty,\n SWIG_POINTER_EXCEPTION | 0) == -1) {\n handle_conversion_error(py_obj,\"%(type_name)s\", name);\n }\n %(inc_ref_count)s\n return c_ptr;\n }\n \n %(c_type)s py_to_%(type_name)s(PyObject* py_obj,const char* name)\n {\n %(c_type)s c_ptr;\n swig_type_info *ty = Weave_SWIG_TypeQuery(\"%(c_type)s\");\n // work on this error reporting...\n if (SWIG_ConvertPtr(py_obj, (void **) &c_ptr, ty,\n SWIG_POINTER_EXCEPTION | 0) == -1) {\n handle_bad_type(py_obj,\"%(type_name)s\", name);\n }\n %(inc_ref_count)s\n return c_ptr;\n }\n};\n\n%(type_name)s_handler x__%(type_name)s_handler = %(type_name)s_handler();\n#define 
convert_to_%(type_name)s(py_obj,name) \\\\\n x__%(type_name)s_handler.convert_to_%(type_name)s(py_obj,name)\n#define py_to_%(type_name)s(py_obj,name) \\\\\n x__%(type_name)s_handler.py_to_%(type_name)s(py_obj,name)\n\n\"\"\"\n\n#----------------------------------------------------------------------\n# This code generates a new SWIG pointer object given a C++ pointer.\n#\n# Important note: The thisown flag of the returned object is set to 0\n# by default.\n#----------------------------------------------------------------------\n\nswig2_c_to_py_template = \"\"\"\nPyObject* %(type_name)s_to_py(void *obj)\n{\n swig_type_info *ty = Weave_SWIG_TypeQuery(\"%(c_type)s\");\n return SWIG_NewPointerObj(obj, ty, 0);\n}\n\"\"\"\n\nclass swig2_converter(common_base_converter):\n \"\"\" A converter for SWIG >= 1.3 wrapped objects.\"\"\"\n def __init__(self, class_name=\"undefined\", pycobj=0, runtime_version=None):\n \"\"\"Initializes the instance.\n\n Parameters\n ----------\n\n - class_name : `string`\n\n Name of class, this is set dynamically at build time by the\n `type_spec` method.\n\n - pycobj : `int`\n\n If `pycobj` is 0 then code is generated to deal with string\n representations of the SWIG wrapped pointer. If it is 1,\n then code is generated to deal with a PyCObject. If it is 2\n then code is generated to deal with with PySwigObject.\n\n - runtime_version : `int`\n\n Specifies the SWIG_RUNTIME_VERSION to use. Defaults to\n `None`. In this case the runtime is automatically\n determined. This option is useful if you want to force the\n runtime_version to be a specific one and override the\n auto-detected one.\n\n \"\"\"\n self.class_name = class_name\n self.pycobj = pycobj # This is on if a PyCObject has been used.\n self.runtime_version = runtime_version\n common_base_converter.__init__(self)\n\n def _get_swig_runtime_version(self):\n \"\"\"This method tries to deduce the SWIG runtime version. 
If\n the SWIG runtime layout changes, the `SWIG_TypeQuery` function\n will not work properly.\n \"\"\"\n versions = []\n for key in sys.modules.keys():\n idx = key.find('swig_runtime_data')\n if idx > -1:\n ver = int(key[idx+17:])\n if ver not in versions:\n versions.append(ver)\n nver = len(versions)\n if nver == 0:\n return 0\n elif nver == 1:\n return versions[0]\n else:\n print \"WARNING: Multiple SWIG versions detected. No version was\"\n print \"explicitly specified. Using the highest possible version.\"\n return max(versions)\n\n def init_info(self, runtime=0):\n \"\"\"Keyword arguments:\n \n runtime -- If false (default), the user does not need to\n link to the swig runtime (libswipy). Newer versions of SWIG\n (>=1.3.23) do not need to build a SWIG runtime library at\n all. In these versions of SWIG the swig_type_info is stored\n in a common module. swig_type_info stores the type\n information and the type converters to cast pointers\n correctly.\n\n With earlier versions of SWIG (<1.3.22) one has to either\n link the weave module with a SWIG runtime library\n (libswigpy) in order to get the swig_type_info. 
Thus, if\n `runtime` is True, the user must link to the swipy runtime\n library and in this case type checking will be performed.\n With these versions of SWIG, if runtime is `False`, no type\n checking is done.\n\n \"\"\"\n common_base_converter.init_info(self)\n # These are generated on the fly instead of defined at \n # the class level.\n self.type_name = self.class_name\n self.c_type = self.class_name + \"*\"\n self.return_type = self.class_name + \"*\"\n self.to_c_return = None # not used\n self.check_func = None # not used\n\n if self.pycobj == 1:\n self.define_macros.append((\"SWIG_COBJECT_TYPES\", None))\n self.define_macros.append((\"SWIG_COBJECT_PYTHON\", None))\n elif self.pycobj == 2:\n self.define_macros.append((\"SWIG_COBJECT_TYPES\", None))\n \n \n if self.runtime_version is None:\n self.runtime_version = self._get_swig_runtime_version()\n\n rv = self.runtime_version\n if rv == 0:\n # The runtime option is only useful for older versions of\n # SWIG.\n if runtime:\n self.define_macros.append((\"SWIG_NOINCLUDE\", None))\n self.support_code.append(swigptr2.swigptr2_code_v0)\n elif rv == 1:\n self.support_code.append(swigptr2.swigptr2_code_v1)\n elif rv == 2:\n self.support_code.append(swigptr2.swigptr2_code_v2)\n else:\n raise AssertionError, \"Unsupported version of the SWIG runtime:\", rv\n\n self.support_code.append(swig2_common_code)\n\n def _get_swig_type(self, value):\n \"\"\"Given the object in the form of `value`, this method\n returns information on the SWIG internal object repesentation\n type. Different versions of SWIG use different object\n representations. 
This method provides information on the type\n of internal representation.\n\n Currently returns one of ['', 'str', 'pycobj', 'pyswig'].\n \"\"\"\n swig_typ = ''\n if hasattr(value, 'this'):\n type_this = type(value.this)\n type_str = str(type_this)\n if type_this == type('str'):\n try:\n data = value.this.split('_')\n if data[2] == 'p':\n swig_typ = 'str'\n except AttributeError:\n pass\n elif type_str == \"\":\n swig_typ = 'pycobj'\n elif type_str.find('PySwig') > -1:\n swig_typ = 'pyswig'\n\n return swig_typ \n \n def type_match(self,value):\n \"\"\" This is a generic type matcher for SWIG-1.3 objects. For\n specific instances, override this method. The method also\n handles cases where SWIG uses a PyCObject for the `this`\n attribute and not a string.\n\n \"\"\"\n if self._get_swig_type(value):\n return 1\n else:\n return 0\n\n def generate_build_info(self):\n if self.class_name != \"undefined\":\n res = common_base_converter.generate_build_info(self)\n else:\n # if there isn't a class_name, we don't want the\n # support_code to be included\n import base_info\n res = base_info.base_info()\n return res\n \n def py_to_c_code(self):\n return swig2_py_to_c_template % self.template_vars()\n\n def c_to_py_code(self):\n return swig2_c_to_py_template % self.template_vars()\n \n def type_spec(self,name,value):\n \"\"\" This returns a generic type converter for SWIG-1.3\n objects. 
For specific instances, override this function if\n necessary.\"\"\"\n # factory\n swig_ob_type = self._get_swig_type(value)\n pycobj = 0\n if swig_ob_type == 'str':\n class_name = value.this.split('_')[-1]\n elif swig_ob_type == 'pycobj':\n pycobj = 1\n elif swig_ob_type == 'pyswig':\n pycobj = 2\n else:\n raise AssertionError, \"Does not look like a SWIG object: %s\"%value\n\n if pycobj:\n class_name = value.__class__.__name__\n if class_name[-3:] == 'Ptr':\n class_name = class_name[:-3]\n \n new_spec = self.__class__(class_name, pycobj, self.runtime_version)\n new_spec.name = name\n return new_spec\n\n def __cmp__(self,other):\n #only works for equal\n res = -1\n try:\n res = cmp(self.name,other.name) or \\\n cmp(self.__class__, other.__class__) or \\\n cmp(self.class_name, other.class_name) or \\\n cmp(self.type_name,other.type_name)\n except:\n pass\n return res\n\n#----------------------------------------------------------------------\n# Uncomment the next line if you want this to be a default converter\n# that is magically invoked by inline.\n#----------------------------------------------------------------------\n#converters.default.insert(0, swig2_converter())\n", "source_code_before": "\"\"\"\nThis module allows one to use SWIG2 (SWIG version >= 1.3) wrapped\nobjects from Weave. SWIG-1.3 wraps objects differently from SWIG-1.1.\n\nThe code here is based on wx_spec.py. However, this module is more\nlike a template for any SWIG2 wrapped converter. To wrap any special\ncode that uses SWIG the user simply needs to override the defaults in\nthe swig2_converter class. These special circumstances arise when one\nhas wrapped code that uses C++ namespaces. However, for most\nstraightforward SWIG wrappers this converter should work fine out of\nthe box.\n\nNewer versions of SWIG (>=1.3.22) represent the wrapped object using a\nPyCObject and also a PySwigObject (>=1.3.24). 
This code supports all\nof these options transparently.\n\nSince SWIG-1.3.x is under intense development there are several issues\nto consider when using the swig2_converter.\n\n 1. For SWIG versions <= 1.3.19, the runtime code was built either\n into the module or into a separate library called libswigpy (or\n something like that). In the latter case, the users Python\n modules were linked to this library and shared type information\n (this was common for large projects with several modules that\n needed to share type information). If you are using multiple\n inheritance and want to be certain that type coercions from a\n derived class to a base class are done correctly, you will need to\n link to the libswigpy library. You will then need to add these to\n the keyword arguments passed along to `weave.inline`:\n\n a. Add a define_macros=[('SWIG_NOINCLUDE', None)]\n\n b. Add the swigpy library to the libraries like so:\n libraries=['swigpy']\n\n c. If the libswigpy is in a non-standard location add the path\n to the library_dirs argument as\n `library_dirs=['/usr/local/lib']` or whatever.\n\n OTOH if you do not need to link to libswigpy (this is likely if\n you are not using multiple inheritance), then you do not need the\n above. However you are likely to get an annoying message of the\n form::\n\n WARNING: swig_type_info is NULL.\n\n for each SWIG object you are inlining (during each call). To\n avoid this add a define_macros=[('NO_SWIG_WARN', None)].\n\n 2. Since keeping track of a separate runtime is a pain, for SWIG\n versions >= 1.3.23 the type information was stored inside a\n special module. Thus in these versions there is no need to link\n to this special SWIG runtime library. This module handles these\n cases automatically and nothing special need be done.\n\n Using modules wrapped with different SWIG versions simultaneously.\n Lets say you have library 'A' that is wrapped using SWIG version\n 1.3.20. 
Then lets say you have a library 'B' wrapped using\n version 1.3.24. Now if you want to use both in weave.inline, we\n have a serious problem. The trouble is that both 'A' and 'B' may\n use different and incompatible runtime layouts. It is impossible\n to get the type conversions right in these cases. Thus it is\n strongly advised that you use one version of SWIG to wrap all of\n the code that you intend to inline using weave. Note that you can\n certainly use SWIG-1.3.23 for everything and do not have to use\n the latest and greatest SWIG to use weave.inline. Just make sure\n that when inlining SWIG wrapped objects that all such objects use\n the same runtime layout. By default, if you are using different\n versions and do need to inline these objects, the latest layout\n will be assumed. This might leave you with holes in your feet,\n but you have been warned. You can force the converter to use a\n specific runtime version if you want (see the\n `swig2_converter.__init__` method and its documentation).\n\n\nPrabhu Ramachandran \n\"\"\"\n\nimport sys\nimport common_info\nfrom c_spec import common_base_converter\nimport converters\nimport swigptr2\n\n\n#----------------------------------------------------------------------\n# Commonly used functions for the type query. 
This is done mainly to\n# avoid code duplication.\n#----------------------------------------------------------------------\nswig2_common_code = \\\n'''\nswig_type_info *\nWeave_SWIG_TypeQuery(const char *name) {\n swig_type_info *ty = SWIG_TypeQuery(name);\n#ifndef NO_SWIG_WARN\n if (ty == NULL) {\n printf(\"WARNING: swig_type_info is NULL.\\\\n\");\n }\n#endif\n return ty;\n}\n'''\n#----------------------------------------------------------------------\n# This code obtains the C++ pointer given a a SWIG2 wrapped C++ object\n# in Python.\n#----------------------------------------------------------------------\n\nswig2_py_to_c_template = \\\n\"\"\"\nclass %(type_name)s_handler\n{\npublic: \n %(c_type)s convert_to_%(type_name)s(PyObject* py_obj, const char* name)\n {\n %(c_type)s c_ptr;\n swig_type_info *ty = Weave_SWIG_TypeQuery(\"%(c_type)s\");\n // work on this error reporting...\n if (SWIG_ConvertPtr(py_obj, (void **) &c_ptr, ty,\n SWIG_POINTER_EXCEPTION | 0) == -1) {\n handle_conversion_error(py_obj,\"%(type_name)s\", name);\n }\n %(inc_ref_count)s\n return c_ptr;\n }\n \n %(c_type)s py_to_%(type_name)s(PyObject* py_obj,const char* name)\n {\n %(c_type)s c_ptr;\n swig_type_info *ty = Weave_SWIG_TypeQuery(\"%(c_type)s\");\n // work on this error reporting...\n if (SWIG_ConvertPtr(py_obj, (void **) &c_ptr, ty,\n SWIG_POINTER_EXCEPTION | 0) == -1) {\n handle_bad_type(py_obj,\"%(type_name)s\", name);\n }\n %(inc_ref_count)s\n return c_ptr;\n }\n};\n\n%(type_name)s_handler x__%(type_name)s_handler = %(type_name)s_handler();\n#define convert_to_%(type_name)s(py_obj,name) \\\\\n x__%(type_name)s_handler.convert_to_%(type_name)s(py_obj,name)\n#define py_to_%(type_name)s(py_obj,name) \\\\\n x__%(type_name)s_handler.py_to_%(type_name)s(py_obj,name)\n\n\"\"\"\n\n#----------------------------------------------------------------------\n# This code generates a new SWIG pointer object given a C++ pointer.\n#\n# Important note: The thisown flag of the returned object is set to 
0\n# by default.\n#----------------------------------------------------------------------\n\nswig2_c_to_py_template = \"\"\"\nPyObject* %(type_name)s_to_py(void *obj)\n{\n swig_type_info *ty = Weave_SWIG_TypeQuery(\"%(c_type)s\");\n return SWIG_NewPointerObj(obj, ty, 0);\n}\n\"\"\"\n\nclass swig2_converter(common_base_converter):\n \"\"\" A converter for SWIG >= 1.3 wrapped objects.\"\"\"\n def __init__(self, class_name=\"undefined\", pycobj=0, runtime_version=None):\n \"\"\"Initializes the instance.\n\n Parameters\n ----------\n\n - class_name : `string`\n\n Name of class, this is set dynamically at build time by the\n `type_spec` method.\n\n - pycobj : `int`\n\n If `pycobj` is 0 then code is generated to deal with string\n representations of the SWIG wrapped pointer. If it is 1,\n then code is generated to deal with a PyCObject. If it is 2\n then code is generated to deal with with PySwigObject.\n\n - runtime_version : `int`\n\n Specifies the SWIG_RUNTIME_VERSION to use. Defaults to\n `None`. In this case the runtime is automatically\n determined. This option is useful if you want to force the\n runtime_version to be a specific one and override the\n auto-detected one.\n\n \"\"\"\n self.class_name = class_name\n self.pycobj = pycobj # This is on if a PyCObject has been used.\n self.runtime_version = runtime_version\n common_base_converter.__init__(self)\n\n def _get_swig_runtime_version(self):\n \"\"\"This method tries to deduce the SWIG runtime version. If\n the SWIG runtime layout changes, the `SWIG_TypeQuery` function\n will not work properly.\n \"\"\"\n versions = []\n for key in sys.modules.keys():\n idx = key.find('swig_runtime_data')\n if idx > -1:\n ver = int(key[idx+17:])\n if ver not in versions:\n versions.append(ver)\n nver = len(versions)\n if nver == 0:\n return 0\n elif nver == 1:\n return versions[0]\n else:\n print \"WARNING: Multiple SWIG versions detected. No version was\"\n print \"explicitly specified. 
Using the highest possible version.\"\n return max(versions)\n\n def init_info(self, runtime=0):\n \"\"\"Keyword arguments:\n \n runtime -- If false (default), the user does not need to\n link to the swig runtime (libswipy). Newer versions of SWIG\n (>=1.3.23) do not need to build a SWIG runtime library at\n all. In these versions of SWIG the swig_type_info is stored\n in a common module. swig_type_info stores the type\n information and the type converters to cast pointers\n correctly.\n\n With earlier versions of SWIG (<1.3.22) one has to either\n link the weave module with a SWIG runtime library\n (libswigpy) in order to get the swig_type_info. Thus, if\n `runtime` is True, the user must link to the swipy runtime\n library and in this case type checking will be performed.\n With these versions of SWIG, if runtime is `False`, no type\n checking is done.\n\n \"\"\"\n common_base_converter.init_info(self)\n # These are generated on the fly instead of defined at \n # the class level.\n self.type_name = self.class_name\n self.c_type = self.class_name + \"*\"\n self.return_type = self.class_name + \"*\"\n self.to_c_return = None # not used\n self.check_func = None # not used\n\n if self.pycobj == 1:\n self.define_macros.append((\"SWIG_COBJECT_TYPES\", None))\n self.define_macros.append((\"SWIG_COBJECT_PYTHON\", None))\n elif self.pycobj == 2:\n self.define_macros.append((\"SWIG_COBJECT_TYPES\", None))\n \n \n if self.runtime_version is None:\n self.runtime_version = self._get_swig_runtime_version()\n\n rv = self.runtime_version\n if rv == 0:\n # The runtime option is only useful for older versions of\n # SWIG.\n if runtime:\n self.define_macros.append((\"SWIG_NOINCLUDE\", None))\n self.support_code.append(swigptr2.swigptr2_code_v0)\n elif rv == 1:\n self.support_code.append(swigptr2.swigptr2_code_v1)\n else:\n raise AssertionError, \"Unsupported version of the SWIG runtime:\", rv\n\n self.support_code.append(swig2_common_code)\n\n def _get_swig_type(self, value):\n 
\"\"\"Given the object in the form of `value`, this method\n returns information on the SWIG internal object repesentation\n type. Different versions of SWIG use different object\n representations. This method provides information on the type\n of internal representation.\n\n Currently returns one of ['', 'str', 'pycobj', 'pyswig'].\n \"\"\"\n swig_typ = ''\n if hasattr(value, 'this'):\n type_this = type(value.this)\n type_str = str(type_this)\n if type_this == type('str'):\n try:\n data = value.this.split('_')\n if data[2] == 'p':\n swig_typ = 'str'\n except AttributeError:\n pass\n elif type_str == \"\":\n swig_typ = 'pycobj'\n elif type_str.find('PySwig') > -1:\n swig_typ = 'pyswig'\n\n return swig_typ \n \n def type_match(self,value):\n \"\"\" This is a generic type matcher for SWIG-1.3 objects. For\n specific instances, override this method. The method also\n handles cases where SWIG uses a PyCObject for the `this`\n attribute and not a string.\n\n \"\"\"\n if self._get_swig_type(value):\n return 1\n else:\n return 0\n\n def generate_build_info(self):\n if self.class_name != \"undefined\":\n res = common_base_converter.generate_build_info(self)\n else:\n # if there isn't a class_name, we don't want the\n # support_code to be included\n import base_info\n res = base_info.base_info()\n return res\n \n def py_to_c_code(self):\n return swig2_py_to_c_template % self.template_vars()\n\n def c_to_py_code(self):\n return swig2_c_to_py_template % self.template_vars()\n \n def type_spec(self,name,value):\n \"\"\" This returns a generic type converter for SWIG-1.3\n objects. 
For specific instances, override this function if\n necessary.\"\"\"\n # factory\n swig_ob_type = self._get_swig_type(value)\n pycobj = 0\n if swig_ob_type == 'str':\n class_name = value.this.split('_')[-1]\n elif swig_ob_type == 'pycobj':\n pycobj = 1\n elif swig_ob_type == 'pyswig':\n pycobj = 2\n else:\n raise AssertionError, \"Does not look like a SWIG object: %s\"%value\n\n if pycobj:\n class_name = value.__class__.__name__\n if class_name[-3:] == 'Ptr':\n class_name = class_name[:-3]\n \n new_spec = self.__class__(class_name, pycobj, self.runtime_version)\n new_spec.name = name\n return new_spec\n\n def __cmp__(self,other):\n #only works for equal\n res = -1\n try:\n res = cmp(self.name,other.name) or \\\n cmp(self.__class__, other.__class__) or \\\n cmp(self.class_name, other.class_name) or \\\n cmp(self.type_name,other.type_name)\n except:\n pass\n return res\n\n#----------------------------------------------------------------------\n# Uncomment the next line if you want this to be a default converter\n# that is magically invoked by inline.\n#----------------------------------------------------------------------\n#converters.default.insert(0, swig2_converter())\n", "methods": [ { "name": "__init__", "long_name": "__init__( self , class_name = \"undefined\" , pycobj = 0 , runtime_version = None )", "filename": "swig2_spec.py", "nloc": 5, "complexity": 1, "token_count": 39, "parameters": [ "self", "class_name", "pycobj", "runtime_version" ], "start_line": 165, "end_line": 195, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 31, "top_nesting_level": 1 }, { "name": "_get_swig_runtime_version", "long_name": "_get_swig_runtime_version( self )", "filename": "swig2_spec.py", "nloc": 17, "complexity": 6, "token_count": 93, "parameters": [ "self" ], "start_line": 197, "end_line": 217, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 1 }, { "name": "init_info", "long_name": "init_info( self , runtime = 0 )", "filename": 
"swig2_spec.py", "nloc": 26, "complexity": 8, "token_count": 198, "parameters": [ "self", "runtime" ], "start_line": 219, "end_line": 272, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 54, "top_nesting_level": 1 }, { "name": "_get_swig_type", "long_name": "_get_swig_type( self , value )", "filename": "swig2_spec.py", "nloc": 17, "complexity": 7, "token_count": 92, "parameters": [ "self", "value" ], "start_line": 274, "end_line": 299, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "type_match", "long_name": "type_match( self , value )", "filename": "swig2_spec.py", "nloc": 5, "complexity": 2, "token_count": 22, "parameters": [ "self", "value" ], "start_line": 301, "end_line": 311, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "generate_build_info", "long_name": "generate_build_info( self )", "filename": "swig2_spec.py", "nloc": 7, "complexity": 2, "token_count": 33, "parameters": [ "self" ], "start_line": 313, "end_line": 321, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "py_to_c_code", "long_name": "py_to_c_code( self )", "filename": "swig2_spec.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 323, "end_line": 324, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "c_to_py_code", "long_name": "c_to_py_code( self )", "filename": "swig2_spec.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 326, "end_line": 327, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "type_spec", "long_name": "type_spec( self , name , value )", "filename": "swig2_spec.py", "nloc": 18, "complexity": 6, "token_count": 113, "parameters": [ "self", "name", "value" ], "start_line": 329, "end_line": 352, "fan_in": 0, "fan_out": 0, "general_fan_out": 
0, "length": 24, "top_nesting_level": 1 }, { "name": "__cmp__", "long_name": "__cmp__( self , other )", "filename": "swig2_spec.py", "nloc": 10, "complexity": 5, "token_count": 66, "parameters": [ "self", "other" ], "start_line": 354, "end_line": 364, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 } ], "methods_before": [ { "name": "__init__", "long_name": "__init__( self , class_name = \"undefined\" , pycobj = 0 , runtime_version = None )", "filename": "swig2_spec.py", "nloc": 5, "complexity": 1, "token_count": 39, "parameters": [ "self", "class_name", "pycobj", "runtime_version" ], "start_line": 165, "end_line": 195, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 31, "top_nesting_level": 1 }, { "name": "_get_swig_runtime_version", "long_name": "_get_swig_runtime_version( self )", "filename": "swig2_spec.py", "nloc": 17, "complexity": 6, "token_count": 93, "parameters": [ "self" ], "start_line": 197, "end_line": 217, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 1 }, { "name": "init_info", "long_name": "init_info( self , runtime = 0 )", "filename": "swig2_spec.py", "nloc": 24, "complexity": 7, "token_count": 183, "parameters": [ "self", "runtime" ], "start_line": 219, "end_line": 270, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 52, "top_nesting_level": 1 }, { "name": "_get_swig_type", "long_name": "_get_swig_type( self , value )", "filename": "swig2_spec.py", "nloc": 17, "complexity": 7, "token_count": 92, "parameters": [ "self", "value" ], "start_line": 272, "end_line": 297, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 26, "top_nesting_level": 1 }, { "name": "type_match", "long_name": "type_match( self , value )", "filename": "swig2_spec.py", "nloc": 5, "complexity": 2, "token_count": 22, "parameters": [ "self", "value" ], "start_line": 299, "end_line": 309, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { 
"name": "generate_build_info", "long_name": "generate_build_info( self )", "filename": "swig2_spec.py", "nloc": 7, "complexity": 2, "token_count": 33, "parameters": [ "self" ], "start_line": 311, "end_line": 319, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "py_to_c_code", "long_name": "py_to_c_code( self )", "filename": "swig2_spec.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 321, "end_line": 322, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "c_to_py_code", "long_name": "c_to_py_code( self )", "filename": "swig2_spec.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self" ], "start_line": 324, "end_line": 325, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "type_spec", "long_name": "type_spec( self , name , value )", "filename": "swig2_spec.py", "nloc": 18, "complexity": 6, "token_count": 113, "parameters": [ "self", "name", "value" ], "start_line": 327, "end_line": 350, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 24, "top_nesting_level": 1 }, { "name": "__cmp__", "long_name": "__cmp__( self , other )", "filename": "swig2_spec.py", "nloc": 10, "complexity": 5, "token_count": 66, "parameters": [ "self", "other" ], "start_line": 352, "end_line": 362, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 } ], "changed_methods": [ { "name": "init_info", "long_name": "init_info( self , runtime = 0 )", "filename": "swig2_spec.py", "nloc": 26, "complexity": 8, "token_count": 198, "parameters": [ "self", "runtime" ], "start_line": 219, "end_line": 272, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 54, "top_nesting_level": 1 } ], "nloc": 252, "complexity": 39, "token_count": 723, "diff_parsed": { "added": [ " elif rv == 2:", " self.support_code.append(swigptr2.swigptr2_code_v2)" ], "deleted": [] 
} }, { "old_path": "weave/swigptr2.py", "new_path": "weave/swigptr2.py", "filename": "swigptr2.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -1624,4 +1624,1348 @@\n #define SWIG_TypeClientData(ti, cd) SWIG_Runtime_TypeClientData(ti, cd)\n #define SWIG_PropagateClientData(ti) SWIG_Runtime_PropagateClientData(ti)\n \n+\"\"\"\n+\n+######################################################################\n+# This is for SWIG-1.3.x where x >= 25.\n+# SWIG_RUNTIME_VERSION == \"2\"\n+\n+# All this does is to include the contents of the file generated by\n+# this command:\n+# swig -python -external-runtime\n+swigptr2_code_v2 = \"\"\"\n+/* ----------------------------------------------------------------------------\n+ * This file was automatically generated by SWIG (http://www.swig.org).\n+ * Version 1.3.25\n+ * \n+ * This file is not intended to be easily readable and contains a number of \n+ * coding conventions designed to improve portability and efficiency. Do not make\n+ * changes to this file unless you know what you are doing--modify the SWIG \n+ * interface file instead. 
\n+ * ----------------------------------------------------------------------------- */\n+\n+/***********************************************************************\n+ *\n+ * This section contains generic SWIG labels for method/variable\n+ * declarations/attributes, and other compiler dependent labels.\n+ *\n+ ************************************************************************/\n+\n+/* \n+ SWIGTEMPLATEDISAMBIGUATOR is needed when wrapping template calls\n+ (cwrap.c:Swig_cfunction_call/Swig_cmethod_call), as in\n+\n+ result = nspace::template function(arg1);\n+ result = arg1->template method(arg2);\n+\n+ SWIGTEMPLATEDISAMBIGUATOR is compiler dependent (common.swg),\n+ - SUN Studio requires 'template', \n+ - gcc-3.4 forbids the use of 'template'.\n+ - gcc-3.2.3 produces internal errors if you use 'template'\n+*/\n+#ifndef SWIGTEMPLATEDISAMBIGUATOR\n+# if defined(__SUNPRO_CC) \n+# define SWIGTEMPLATEDISAMBIGUATOR template\n+# else\n+# define SWIGTEMPLATEDISAMBIGUATOR \n+# endif\n+#endif\n+\n+/* inline attribute */\n+#ifndef SWIGINLINE\n+# if defined(__cplusplus) || (defined(__GNUC__) && !defined(__STRICT_ANSI__))\n+# define SWIGINLINE inline\n+# else\n+# define SWIGINLINE\n+# endif\n+#endif\n+\n+/* attritbute passed for some compilers to avoid 'unused' warnings */\n+#ifndef SWIGUNUSED\n+# if defined(__GNUC__) || defined(__ICC)\n+# define SWIGUNUSED __attribute__ ((unused)) \n+# else\n+# define SWIGUNUSED \n+# endif\n+#endif\n+\n+/* internal SWIG method */\n+#ifndef SWIGINTERN\n+# define SWIGINTERN static SWIGUNUSED\n+#endif\n+\n+/* internal inline SWIG method */\n+#ifndef SWIGINTERNINLINE\n+# define SWIGINTERNINLINE SWIGINTERN SWIGINLINE\n+#endif\n+\n+/* how we export a method such that it can go in to a shared or dll library */\n+#ifndef SWIGEXPORT\n+# if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)\n+# if defined(_MSC_VER) || defined(__GNUC__)\n+# if defined(STATIC_LINKED)\n+# define SWIGEXPORT(a) a\n+# else\n+# define SWIGEXPORT(a) 
__declspec(dllexport) a\n+# endif\n+# else\n+# if defined(__BORLANDC__)\n+# define SWIGEXPORT(a) a _export\n+# else\n+# define SWIGEXPORT(a) a\n+# endif\n+# endif\n+# else\n+# define SWIGEXPORT(a) a\n+# endif\n+#endif\n+\n+/***********************************************************************\n+ * swigrun.swg\n+ *\n+ * This file contains generic CAPI SWIG runtime support for pointer\n+ * type checking.\n+ *\n+ ************************************************************************/\n+\n+/* This should only be incremented when either the layout of swig_type_info changes,\n+ or for whatever reason, the runtime changes incompatibly */\n+#define SWIG_RUNTIME_VERSION \"2\"\n+\n+/* define SWIG_TYPE_TABLE_NAME as \"SWIG_TYPE_TABLE\" */\n+#ifdef SWIG_TYPE_TABLE\n+# define SWIG_QUOTE_STRING(x) #x\n+# define SWIG_EXPAND_AND_QUOTE_STRING(x) SWIG_QUOTE_STRING(x)\n+# define SWIG_TYPE_TABLE_NAME SWIG_EXPAND_AND_QUOTE_STRING(SWIG_TYPE_TABLE)\n+#else\n+# define SWIG_TYPE_TABLE_NAME\n+#endif\n+\n+/*\n+ You can use the SWIGRUNTIME and SWIGRUNTIMEINLINE macros for\n+ creating a static or dynamic library from the swig runtime code.\n+ In 99.9% of the cases, swig just needs to declare them as 'static'.\n+ \n+ But only do this if is strictly necessary, ie, if you have problems\n+ with your compiler or so.\n+*/\n+\n+#ifndef SWIGRUNTIME\n+# define SWIGRUNTIME SWIGINTERN\n+#endif\n+\n+#ifndef SWIGRUNTIMEINLINE\n+# define SWIGRUNTIMEINLINE SWIGRUNTIME SWIGINLINE\n+#endif\n+\n+#include \n+\n+#ifdef __cplusplus\n+extern \"C\" {\n+#endif\n+\n+typedef void *(*swig_converter_func)(void *);\n+typedef struct swig_type_info *(*swig_dycast_func)(void **);\n+\n+/* Structure to store inforomation on one type */\n+typedef struct swig_type_info {\n+ const char *name;\t\t\t/* mangled name of this type */\n+ const char *str;\t\t\t/* human readable name of this type */\n+ swig_dycast_func dcast;\t\t/* dynamic cast function down a hierarchy */\n+ struct swig_cast_info *cast;\t\t\t/* linked list of types 
that can cast into this type */\n+ void *clientdata;\t\t/* language specific type data */\n+} swig_type_info;\n+\n+/* Structure to store a type and conversion function used for casting */\n+typedef struct swig_cast_info {\n+ swig_type_info *type;\t\t\t/* pointer to type that is equivalent to this type */\n+ swig_converter_func converter;\t\t/* function to cast the void pointers */\n+ struct swig_cast_info *next;\t\t\t/* pointer to next cast in linked list */\n+ struct swig_cast_info *prev;\t\t\t/* pointer to the previous cast */\n+} swig_cast_info;\n+\n+/* Structure used to store module information\n+ * Each module generates one structure like this, and the runtime collects\n+ * all of these structures and stores them in a circularly linked list.*/\n+typedef struct swig_module_info {\n+ swig_type_info **types;\t\t/* Array of pointers to swig_type_info structures that are in this module */\n+ size_t size;\t\t /* Number of types in this module */\n+ struct swig_module_info *next;\t\t/* Pointer to next element in circularly linked list */\n+ swig_type_info **type_initial;\t/* Array of initially generated type structures */\n+ swig_cast_info **cast_initial;\t/* Array of initially generated casting structures */\n+ void *clientdata;\t\t/* Language specific module data */\n+} swig_module_info;\n+\n+\n+/* \n+ Compare two type names skipping the space characters, therefore\n+ \"char*\" == \"char *\" and \"Class\" == \"Class\", etc.\n+\n+ Return 0 when the two name types are equivalent, as in\n+ strncmp, but skipping ' '.\n+*/\n+SWIGRUNTIME int\n+SWIG_TypeNameComp(const char *f1, const char *l1,\n+\t\t const char *f2, const char *l2) {\n+ for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {\n+ while ((*f1 == ' ') && (f1 != l1)) ++f1;\n+ while ((*f2 == ' ') && (f2 != l2)) ++f2;\n+ if (*f1 != *f2) return (int)(*f1 - *f2);\n+ }\n+ return (l1 - f1) - (l2 - f2);\n+}\n+\n+/*\n+ Check type equivalence in a name list like ||...\n+ Return 0 if not equal, 1 if equal\n+*/\n+SWIGRUNTIME 
int\n+SWIG_TypeEquiv(const char *nb, const char *tb) {\n+ int equiv = 0;\n+ const char* te = tb + strlen(tb);\n+ const char* ne = nb;\n+ while (!equiv && *ne) {\n+ for (nb = ne; *ne; ++ne) {\n+ if (*ne == '|') break;\n+ }\n+ equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;\n+ if (*ne) ++ne;\n+ }\n+ return equiv;\n+}\n+\n+/*\n+ Check type equivalence in a name list like ||...\n+ Return 0 if equal, -1 if nb < tb, 1 if nb > tb\n+*/\n+SWIGRUNTIME int\n+SWIG_TypeCompare(const char *nb, const char *tb) {\n+ int equiv = 0;\n+ const char* te = tb + strlen(tb);\n+ const char* ne = nb;\n+ while (!equiv && *ne) {\n+ for (nb = ne; *ne; ++ne) {\n+ if (*ne == '|') break;\n+ }\n+ equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;\n+ if (*ne) ++ne;\n+ }\n+ return equiv;\n+}\n+\n+\n+/* think of this as a c++ template<> or a scheme macro */\n+#define SWIG_TypeCheck_Template(comparison, ty) \\\n+ if (ty) { \\\n+ swig_cast_info *iter = ty->cast; \\\n+ while (iter) { \\\n+ if (comparison) { \\\n+ if (iter == ty->cast) return iter; \\\n+ /* Move iter to the top of the linked list */ \\\n+ iter->prev->next = iter->next; \\\n+ if (iter->next) \\\n+ iter->next->prev = iter->prev; \\\n+ iter->next = ty->cast; \\\n+ iter->prev = 0; \\\n+ if (ty->cast) ty->cast->prev = iter; \\\n+ ty->cast = iter; \\\n+ return iter; \\\n+ } \\\n+ iter = iter->next; \\\n+ } \\\n+ } \\\n+ return 0\n+\n+/*\n+ Check the typename\n+*/\n+SWIGRUNTIME swig_cast_info *\n+SWIG_TypeCheck(const char *c, swig_type_info *ty) {\n+ SWIG_TypeCheck_Template(strcmp(iter->type->name, c) == 0, ty);\n+}\n+\n+/* Same as previous function, except strcmp is replaced with a pointer comparison */\n+SWIGRUNTIME swig_cast_info *\n+SWIG_TypeCheckStruct(swig_type_info *from, swig_type_info *into) {\n+ SWIG_TypeCheck_Template(iter->type == from, into);\n+}\n+\n+/*\n+ Cast a pointer up an inheritance hierarchy\n+*/\n+SWIGRUNTIMEINLINE void *\n+SWIG_TypeCast(swig_cast_info *ty, void *ptr) {\n+ return ((!ty) || 
(!ty->converter)) ? ptr : (*ty->converter)(ptr);\n+}\n+\n+/* \n+ Dynamic pointer casting. Down an inheritance hierarchy\n+*/\n+SWIGRUNTIME swig_type_info *\n+SWIG_TypeDynamicCast(swig_type_info *ty, void **ptr) {\n+ swig_type_info *lastty = ty;\n+ if (!ty || !ty->dcast) return ty;\n+ while (ty && (ty->dcast)) {\n+ ty = (*ty->dcast)(ptr);\n+ if (ty) lastty = ty;\n+ }\n+ return lastty;\n+}\n+\n+/*\n+ Return the name associated with this type\n+*/\n+SWIGRUNTIMEINLINE const char *\n+SWIG_TypeName(const swig_type_info *ty) {\n+ return ty->name;\n+}\n+\n+/*\n+ Return the pretty name associated with this type,\n+ that is an unmangled type name in a form presentable to the user.\n+*/\n+SWIGRUNTIME const char *\n+SWIG_TypePrettyName(const swig_type_info *type) {\n+ /* The \"str\" field contains the equivalent pretty names of the\n+ type, separated by vertical-bar characters. We choose\n+ to print the last name, as it is often (?) the most\n+ specific. */\n+ if (type->str != NULL) {\n+ const char *last_name = type->str;\n+ const char *s;\n+ for (s = type->str; *s; s++)\n+ if (*s == '|') last_name = s+1;\n+ return last_name;\n+ }\n+ else\n+ return type->name;\n+}\n+\n+/* \n+ Set the clientdata field for a type\n+*/\n+SWIGRUNTIME void\n+SWIG_TypeClientData(swig_type_info *ti, void *clientdata) {\n+ if (!ti->clientdata) {\n+ swig_cast_info *cast = ti->cast;\n+ /* if (ti->clientdata == clientdata) return; */\n+ ti->clientdata = clientdata;\n+ \n+ while (cast) {\n+ if (!cast->converter)\n+\tSWIG_TypeClientData(cast->type, clientdata);\n+ cast = cast->next;\n+ }\n+ }\n+}\n+\n+/*\n+ Search for a swig_type_info structure only by mangled name\n+ Search is a O(log #types)\n+ \n+ We start searching at module start, and finish searching when start == end. 
\n+ Note: if start == end at the beginning of the function, we go all the way around\n+ the circular list.\n+*/\n+SWIGRUNTIME swig_type_info *\n+SWIG_MangledTypeQueryModule(swig_module_info *start, \n+ swig_module_info *end, \n+\t\t const char *name) {\n+ swig_module_info *iter = start;\n+ do {\n+ if (iter->size) {\n+ register size_t l = 0;\n+ register size_t r = iter->size - 1;\n+ do {\n+\t/* since l+r >= 0, we can (>> 1) instead (/ 2) */\n+\tregister size_t i = (l + r) >> 1; \n+\tconst char *iname = iter->types[i]->name;\n+\tif (iname) {\n+\t register int compare = strcmp(name, iname);\n+\t if (compare == 0) {\t \n+\t return iter->types[i];\n+\t } else if (compare < 0) {\n+\t if (i) {\n+\t r = i - 1;\n+\t } else {\n+\t break;\n+\t }\n+\t } else if (compare > 0) {\n+\t l = i + 1;\n+\t }\n+\t} else {\n+\t break; /* should never happen */\n+\t}\n+ } while (l <= r);\n+ }\n+ iter = iter->next;\n+ } while (iter != end);\n+ return 0;\n+}\n+\n+/*\n+ Search for a swig_type_info structure for either a mangled name or a human readable name.\n+ It first searches the mangled names of the types, which is a O(log #types)\n+ If a type is not found it then searches the human readable names, which is O(#types).\n+ \n+ We start searching at module start, and finish searching when start == end. 
\n+ Note: if start == end at the beginning of the function, we go all the way around\n+ the circular list.\n+*/\n+SWIGRUNTIME swig_type_info *\n+SWIG_TypeQueryModule(swig_module_info *start, \n+ swig_module_info *end, \n+\t\t const char *name) {\n+ /* STEP 1: Search the name field using binary search */\n+ swig_type_info *ret = SWIG_MangledTypeQueryModule(start, end, name);\n+ if (ret) {\n+ return ret;\n+ } else {\n+ /* STEP 2: If the type hasn't been found, do a complete search\n+ of the str field (the human readable name) */\n+ swig_module_info *iter = start;\n+ do {\n+ register size_t i = 0;\n+ for (; i < iter->size; ++i) {\n+\tif (iter->types[i]->str && (SWIG_TypeEquiv(iter->types[i]->str, name)))\n+\t return iter->types[i];\n+ }\n+ iter = iter->next;\n+ } while (iter != end);\n+ }\n+ \n+ /* neither found a match */\n+ return 0;\n+}\n+\n+\n+/* \n+ Pack binary data into a string\n+*/\n+SWIGRUNTIME char *\n+SWIG_PackData(char *c, void *ptr, size_t sz) {\n+ static const char hex[17] = \"0123456789abcdef\";\n+ register const unsigned char *u = (unsigned char *) ptr;\n+ register const unsigned char *eu = u + sz;\n+ for (; u != eu; ++u) {\n+ register unsigned char uu = *u;\n+ *(c++) = hex[(uu & 0xf0) >> 4];\n+ *(c++) = hex[uu & 0xf];\n+ }\n+ return c;\n+}\n+\n+/* \n+ Unpack binary data from a string\n+*/\n+SWIGRUNTIME const char *\n+SWIG_UnpackData(const char *c, void *ptr, size_t sz) {\n+ register unsigned char *u = (unsigned char *) ptr;\n+ register const unsigned char *eu = u + sz;\n+ for (; u != eu; ++u) {\n+ register char d = *(c++);\n+ register unsigned char uu = 0;\n+ if ((d >= '0') && (d <= '9'))\n+ uu = ((d - '0') << 4);\n+ else if ((d >= 'a') && (d <= 'f'))\n+ uu = ((d - ('a'-10)) << 4);\n+ else \n+ return (char *) 0;\n+ d = *(c++);\n+ if ((d >= '0') && (d <= '9'))\n+ uu |= (d - '0');\n+ else if ((d >= 'a') && (d <= 'f'))\n+ uu |= (d - ('a'-10));\n+ else \n+ return (char *) 0;\n+ *u = uu;\n+ }\n+ return c;\n+}\n+\n+/* \n+ Pack 'void *' into a string 
buffer.\n+*/\n+SWIGRUNTIME char *\n+SWIG_PackVoidPtr(char *buff, void *ptr, const char *name, size_t bsz) {\n+ char *r = buff;\n+ if ((2*sizeof(void *) + 2) > bsz) return 0;\n+ *(r++) = '_';\n+ r = SWIG_PackData(r,&ptr,sizeof(void *));\n+ if (strlen(name) + 1 > (bsz - (r - buff))) return 0;\n+ strcpy(r,name);\n+ return buff;\n+}\n+\n+SWIGRUNTIME const char *\n+SWIG_UnpackVoidPtr(const char *c, void **ptr, const char *name) {\n+ if (*c != '_') {\n+ if (strcmp(c,\"NULL\") == 0) {\n+ *ptr = (void *) 0;\n+ return name;\n+ } else {\n+ return 0;\n+ }\n+ }\n+ return SWIG_UnpackData(++c,ptr,sizeof(void *));\n+}\n+\n+SWIGRUNTIME char *\n+SWIG_PackDataName(char *buff, void *ptr, size_t sz, const char *name, size_t bsz) {\n+ char *r = buff;\n+ size_t lname = (name ? strlen(name) : 0);\n+ if ((2*sz + 2 + lname) > bsz) return 0;\n+ *(r++) = '_';\n+ r = SWIG_PackData(r,ptr,sz);\n+ if (lname) {\n+ strncpy(r,name,lname+1);\n+ } else {\n+ *r = 0;\n+ }\n+ return buff;\n+}\n+\n+SWIGRUNTIME const char *\n+SWIG_UnpackDataName(const char *c, void *ptr, size_t sz, const char *name) {\n+ if (*c != '_') {\n+ if (strcmp(c,\"NULL\") == 0) {\n+ memset(ptr,0,sz);\n+ return name;\n+ } else {\n+ return 0;\n+ }\n+ }\n+ return SWIG_UnpackData(++c,ptr,sz);\n+}\n+\n+#ifdef __cplusplus\n+}\n+#endif\n+\n+/***********************************************************************\n+ * pyrun.swg\n+ *\n+ * This file contains the runtime support for Python modules\n+ * and includes code for managing global variables and pointer\n+ * type checking.\n+ *\n+ * Author : David Beazley (beazley@cs.uchicago.edu)\n+ ************************************************************************/\n+\n+/* Common SWIG API */\n+#define SWIG_ConvertPtr(obj, pp, type, flags) SWIG_Python_ConvertPtr(obj, pp, type, flags)\n+#define SWIG_NewPointerObj(p, type, flags) SWIG_Python_NewPointerObj(p, type, flags)\n+#define SWIG_MustGetPtr(p, type, argnum, flags) SWIG_Python_MustGetPtr(p, type, argnum, flags)\n+ \n+\n+/* Python-specific 
SWIG API */\n+#define SWIG_ConvertPacked(obj, ptr, sz, ty, flags) SWIG_Python_ConvertPacked(obj, ptr, sz, ty, flags)\n+#define SWIG_NewPackedObj(ptr, sz, type) SWIG_Python_NewPackedObj(ptr, sz, type)\n+\n+/* Runtime API */\n+#define SWIG_GetModule(clientdata) SWIG_Python_GetModule()\n+#define SWIG_SetModule(clientdata, pointer) SWIG_Python_SetModule(pointer)\n+\n+/* -----------------------------------------------------------------------------\n+ * Pointer declarations\n+ * ----------------------------------------------------------------------------- */\n+/*\n+ Use SWIG_NO_COBJECT_TYPES to force the use of strings to represent\n+ C/C++ pointers in the python side. Very useful for debugging, but\n+ not always safe.\n+*/\n+#if !defined(SWIG_NO_COBJECT_TYPES) && !defined(SWIG_COBJECT_TYPES)\n+# define SWIG_COBJECT_TYPES\n+#endif\n+\n+/* Flags for pointer conversion */\n+#define SWIG_POINTER_EXCEPTION 0x1\n+#define SWIG_POINTER_DISOWN 0x2\n+\n+\n+/* Add PyOS_snprintf for old Pythons */\n+#if PY_VERSION_HEX < 0x02020000\n+#define PyOS_snprintf snprintf\n+#endif\n+\n+#ifdef __cplusplus\n+extern \"C\" {\n+#endif\n+\n+/* -----------------------------------------------------------------------------\n+ * Create a new pointer string \n+ * ----------------------------------------------------------------------------- */\n+#ifndef SWIG_BUFFER_SIZE\n+#define SWIG_BUFFER_SIZE 1024\n+#endif\n+\n+#if defined(SWIG_COBJECT_TYPES)\n+#if !defined(SWIG_COBJECT_PYTHON)\n+/* -----------------------------------------------------------------------------\n+ * Implements a simple Swig Object type, and use it instead of PyCObject\n+ * ----------------------------------------------------------------------------- */\n+\n+typedef struct {\n+ PyObject_HEAD\n+ void *ptr;\n+ const char *desc;\n+} PySwigObject;\n+\n+/* Declarations for objects of type PySwigObject */\n+\n+SWIGRUNTIME int\n+PySwigObject_print(PySwigObject *v, FILE *fp, int flags)\n+{\n+ char result[SWIG_BUFFER_SIZE];\n+ flags = 
flags;\n+ if (SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result))) {\n+ fputs(\"\", fp);\n+ return 0; \n+ } else {\n+ return 1; \n+ }\n+}\n+ \n+SWIGRUNTIME PyObject *\n+PySwigObject_repr(PySwigObject *v)\n+{\n+ char result[SWIG_BUFFER_SIZE];\n+ return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?\n+ PyString_FromFormat(\"\", result) : 0;\n+}\n+\n+SWIGRUNTIME PyObject *\n+PySwigObject_str(PySwigObject *v)\n+{\n+ char result[SWIG_BUFFER_SIZE];\n+ return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?\n+ PyString_FromString(result) : 0;\n+}\n+\n+SWIGRUNTIME PyObject *\n+PySwigObject_long(PySwigObject *v)\n+{\n+ return PyLong_FromVoidPtr(v->ptr);\n+}\n+\n+SWIGRUNTIME PyObject *\n+PySwigObject_format(const char* fmt, PySwigObject *v)\n+{\n+ PyObject *res = NULL;\n+ PyObject *args = PyTuple_New(1);\n+ if (args && (PyTuple_SetItem(args, 0, PySwigObject_long(v)) == 0)) {\n+ PyObject *ofmt = PyString_FromString(fmt);\n+ if (ofmt) {\n+ res = PyString_Format(ofmt,args);\n+ Py_DECREF(ofmt);\n+ }\n+ Py_DECREF(args);\n+ } \n+ return res;\n+}\n+\n+SWIGRUNTIME PyObject *\n+PySwigObject_oct(PySwigObject *v)\n+{\n+ return PySwigObject_format(\"%o\",v);\n+}\n+\n+SWIGRUNTIME PyObject *\n+PySwigObject_hex(PySwigObject *v)\n+{\n+ return PySwigObject_format(\"%x\",v);\n+}\n+\n+SWIGRUNTIME int\n+PySwigObject_compare(PySwigObject *v, PySwigObject *w)\n+{\n+ int c = strcmp(v->desc, w->desc);\n+ if (c) {\n+ return (c > 0) ? 1 : -1;\n+ } else {\n+ void *i = v->ptr;\n+ void *j = w->ptr;\n+ return (i < j) ? -1 : ((i > j) ? 
1 : 0);\n+ }\n+}\n+\n+SWIGRUNTIME void\n+PySwigObject_dealloc(PySwigObject *self)\n+{\n+ PyObject_DEL(self);\n+}\n+\n+SWIGRUNTIME PyTypeObject*\n+PySwigObject_type(void) {\n+ static char pyswigobject_type__doc__[] = \n+ \"Swig object carries a C/C++ instance pointer\";\n+ \n+ static PyNumberMethods PySwigObject_as_number = {\n+ (binaryfunc)0, /*nb_add*/\n+ (binaryfunc)0, /*nb_subtract*/\n+ (binaryfunc)0, /*nb_multiply*/\n+ (binaryfunc)0, /*nb_divide*/\n+ (binaryfunc)0, /*nb_remainder*/\n+ (binaryfunc)0, /*nb_divmod*/\n+ (ternaryfunc)0,/*nb_power*/\n+ (unaryfunc)0, /*nb_negative*/\n+ (unaryfunc)0, /*nb_positive*/\n+ (unaryfunc)0, /*nb_absolute*/\n+ (inquiry)0, /*nb_nonzero*/\n+ 0,\t\t /*nb_invert*/\n+ 0,\t\t /*nb_lshift*/\n+ 0,\t\t /*nb_rshift*/\n+ 0,\t\t /*nb_and*/\n+ 0,\t\t /*nb_xor*/\n+ 0,\t\t /*nb_or*/\n+ (coercion)0, /*nb_coerce*/\n+ (unaryfunc)PySwigObject_long, /*nb_int*/\n+ (unaryfunc)PySwigObject_long, /*nb_long*/\n+ (unaryfunc)0, /*nb_float*/\n+ (unaryfunc)PySwigObject_oct, /*nb_oct*/\n+ (unaryfunc)PySwigObject_hex, /*nb_hex*/\n+#if PY_VERSION_HEX >= 0x02000000\n+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_true_divide */ \n+#endif\n+ };\n+\n+ static PyTypeObject pyswigobject_type\n+#if !defined(__cplusplus)\n+ ; \n+ static int type_init = 0;\n+ if (!type_init) {\n+ PyTypeObject tmp\n+#endif\n+ = {\n+ PyObject_HEAD_INIT(&PyType_Type)\n+ 0,\t\t\t\t\t/*ob_size*/\n+ \"PySwigObject\",\t\t\t/*tp_name*/\n+ sizeof(PySwigObject),\t\t/*tp_basicsize*/\n+ 0,\t\t\t\t\t/*tp_itemsize*/\n+ /* methods */\n+ (destructor)PySwigObject_dealloc,\t/*tp_dealloc*/\n+ (printfunc)PySwigObject_print,\t/*tp_print*/\n+ (getattrfunc)0,\t\t\t/*tp_getattr*/\n+ (setattrfunc)0,\t\t\t/*tp_setattr*/\n+ (cmpfunc)PySwigObject_compare,\t/*tp_compare*/\n+ (reprfunc)PySwigObject_repr,\t/*tp_repr*/\n+ &PySwigObject_as_number,\t /*tp_as_number*/\n+ 0,\t\t\t\t\t/*tp_as_sequence*/\n+ 0,\t\t\t\t\t/*tp_as_mapping*/\n+ (hashfunc)0,\t\t\t/*tp_hash*/\n+ 
(ternaryfunc)0,\t\t\t/*tp_call*/\n+ (reprfunc)PySwigObject_str,\t\t/*tp_str*/\n+ /* Space for future expansion */\n+ 0,0,0,0,\n+ pyswigobject_type__doc__, \t /* Documentation string */\n+#if PY_VERSION_HEX >= 0x02000000\n+ 0, /* tp_traverse */\n+ 0, /* tp_clear */\n+#endif\n+#if PY_VERSION_HEX >= 0x02010000\n+ 0, /* tp_richcompare */\n+ 0, /* tp_weaklistoffset */\n+#endif\n+#if PY_VERSION_HEX >= 0x02020000\n+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */\n+#endif\n+#if PY_VERSION_HEX >= 0x02030000\n+ 0, /* tp_del */\n+#endif\n+#ifdef COUNT_ALLOCS\n+ 0,0,0,0 /* tp_alloc -> tp_next */\n+#endif\n+ };\n+#if !defined(__cplusplus)\n+ pyswigobject_type = tmp;\n+ type_init = 1;\n+ }\n+#endif\n+ return &pyswigobject_type;\n+}\n+\n+SWIGRUNTIME PyObject *\n+PySwigObject_FromVoidPtrAndDesc(void *ptr, const char *desc)\n+{\n+ PySwigObject *self = PyObject_NEW(PySwigObject, PySwigObject_type());\n+ if (self) {\n+ self->ptr = ptr;\n+ self->desc = desc;\n+ }\n+ return (PyObject *)self;\n+}\n+\n+SWIGRUNTIMEINLINE void *\n+PySwigObject_AsVoidPtr(PyObject *self)\n+{\n+ return ((PySwigObject *)self)->ptr;\n+}\n+\n+SWIGRUNTIMEINLINE const char *\n+PySwigObject_GetDesc(PyObject *self)\n+{\n+ return ((PySwigObject *)self)->desc;\n+}\n+\n+SWIGRUNTIMEINLINE int\n+PySwigObject_Check(PyObject *op) {\n+ return ((op)->ob_type == PySwigObject_type()) \n+ || (strcmp((op)->ob_type->tp_name,\"PySwigObject\") == 0);\n+}\n+\n+/* -----------------------------------------------------------------------------\n+ * Implements a simple Swig Packed type, and use it instead of string\n+ * ----------------------------------------------------------------------------- */\n+\n+typedef struct {\n+ PyObject_HEAD\n+ void *pack;\n+ const char *desc;\n+ size_t size;\n+} PySwigPacked;\n+\n+SWIGRUNTIME int\n+PySwigPacked_print(PySwigPacked *v, FILE *fp, int flags)\n+{\n+ char result[SWIG_BUFFER_SIZE];\n+ flags = flags;\n+ fputs(\"pack, v->size, 0, sizeof(result))) {\n+ fputs(\"at \", fp); \n+ 
fputs(result, fp); \n+ }\n+ fputs(v->desc,fp); \n+ fputs(\">\", fp);\n+ return 0; \n+}\n+ \n+SWIGRUNTIME PyObject *\n+PySwigPacked_repr(PySwigPacked *v)\n+{\n+ char result[SWIG_BUFFER_SIZE];\n+ if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {\n+ return PyString_FromFormat(\"\", result, v->desc);\n+ } else {\n+ return PyString_FromFormat(\"\", v->desc);\n+ } \n+}\n+\n+SWIGRUNTIME PyObject *\n+PySwigPacked_str(PySwigPacked *v)\n+{\n+ char result[SWIG_BUFFER_SIZE];\n+ if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))){\n+ return PyString_FromFormat(\"%s%s\", result, v->desc);\n+ } else {\n+ return PyString_FromFormat(\"%s\", v->desc);\n+ } \n+}\n+\n+SWIGRUNTIME int\n+PySwigPacked_compare(PySwigPacked *v, PySwigPacked *w)\n+{\n+ int c = strcmp(v->desc, w->desc);\n+ if (c) {\n+ return (c > 0) ? 1 : -1;\n+ } else {\n+ size_t i = v->size;\n+ size_t j = w->size;\n+ int s = (i < j) ? -1 : ((i > j) ? 1 : 0);\n+ return s ? s : strncmp((char *)v->pack, (char *)w->pack, 2*v->size);\n+ }\n+}\n+\n+SWIGRUNTIME void\n+PySwigPacked_dealloc(PySwigPacked *self)\n+{\n+ free(self->pack);\n+ PyObject_DEL(self);\n+}\n+\n+SWIGRUNTIME PyTypeObject*\n+PySwigPacked_type(void) {\n+ static char pyswigpacked_type__doc__[] = \n+ \"Swig object carries a C/C++ instance pointer\";\n+ static PyTypeObject pyswigpacked_type\n+#if !defined(__cplusplus)\n+ ;\n+ static int type_init = 0; \n+ if (!type_init) {\n+ PyTypeObject tmp\n+#endif\n+ = {\n+ PyObject_HEAD_INIT(&PyType_Type)\n+ 0,\t\t\t\t\t/*ob_size*/\n+ \"PySwigPacked\",\t\t\t/*tp_name*/\n+ sizeof(PySwigPacked),\t\t/*tp_basicsize*/\n+ 0,\t\t\t\t\t/*tp_itemsize*/\n+ /* methods */\n+ (destructor)PySwigPacked_dealloc,\t/*tp_dealloc*/\n+ (printfunc)PySwigPacked_print,\t/*tp_print*/\n+ (getattrfunc)0,\t\t\t/*tp_getattr*/\n+ (setattrfunc)0,\t\t\t/*tp_setattr*/\n+ (cmpfunc)PySwigPacked_compare,\t/*tp_compare*/\n+ (reprfunc)PySwigPacked_repr,\t/*tp_repr*/\n+ 0,\t /*tp_as_number*/\n+ 0,\t\t\t\t\t/*tp_as_sequence*/\n+ 
0,\t\t\t\t\t/*tp_as_mapping*/\n+ (hashfunc)0,\t\t\t/*tp_hash*/\n+ (ternaryfunc)0,\t\t\t/*tp_call*/\n+ (reprfunc)PySwigPacked_str,\t\t/*tp_str*/\n+ /* Space for future expansion */\n+ 0,0,0,0,\n+ pyswigpacked_type__doc__, \t /* Documentation string */\n+#if PY_VERSION_HEX >= 0x02000000\n+ 0, /* tp_traverse */\n+ 0, /* tp_clear */\n+#endif\n+#if PY_VERSION_HEX >= 0x02010000\n+ 0, /* tp_richcompare */\n+ 0, /* tp_weaklistoffset */\n+#endif\n+#if PY_VERSION_HEX >= 0x02020000 \n+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */\n+#endif\n+#if PY_VERSION_HEX >= 0x02030000\n+ 0, /* tp_del */\n+#endif\n+#ifdef COUNT_ALLOCS\n+ 0,0,0,0 /* tp_alloc -> tp_next */\n+#endif\n+ };\n+#if !defined(__cplusplus)\n+ pyswigpacked_type = tmp;\n+ type_init = 1;\n+ }\n+#endif\n+ return &pyswigpacked_type;\n+}\n+\n+SWIGRUNTIME PyObject *\n+PySwigPacked_FromDataAndDesc(void *ptr, size_t size, const char *desc)\n+{\n+ PySwigPacked *self = PyObject_NEW(PySwigPacked, PySwigPacked_type());\n+ if (self == NULL) {\n+ return NULL;\n+ } else {\n+ void *pack = malloc(size);\n+ if (pack) {\n+ memcpy(pack, ptr, size);\n+ self->pack = pack;\n+ self->desc = desc;\n+ self->size = size;\n+ return (PyObject *) self;\n+ }\n+ return NULL;\n+ }\n+}\n+\n+SWIGRUNTIMEINLINE const char *\n+PySwigPacked_UnpackData(PyObject *obj, void *ptr, size_t size)\n+{\n+ PySwigPacked *self = (PySwigPacked *)obj;\n+ if (self->size != size) return 0;\n+ memcpy(ptr, self->pack, size);\n+ return self->desc;\n+}\n+\n+SWIGRUNTIMEINLINE const char *\n+PySwigPacked_GetDesc(PyObject *self)\n+{\n+ return ((PySwigPacked *)self)->desc;\n+}\n+\n+SWIGRUNTIMEINLINE int\n+PySwigPacked_Check(PyObject *op) {\n+ return ((op)->ob_type == PySwigPacked_type()) \n+ || (strcmp((op)->ob_type->tp_name,\"PySwigPacked\") == 0);\n+}\n+\n+#else\n+/* -----------------------------------------------------------------------------\n+ * Use the old Python PyCObject instead of PySwigObject\n+ * 
----------------------------------------------------------------------------- */\n+\n+#define PySwigObject_GetDesc(obj)\t PyCObject_GetDesc(obj)\n+#define PySwigObject_Check(obj)\t PyCObject_Check(obj)\n+#define PySwigObject_AsVoidPtr(obj)\t PyCObject_AsVoidPtr(obj)\n+#define PySwigObject_FromVoidPtrAndDesc(p, d) PyCObject_FromVoidPtrAndDesc(p, d, NULL)\n+\n+#endif\n+\n+#endif\n+\n+/* -----------------------------------------------------------------------------\n+ * errors manipulation\n+ * ----------------------------------------------------------------------------- */\n+\n+SWIGRUNTIME void\n+SWIG_Python_TypeError(const char *type, PyObject *obj)\n+{\n+ if (type) {\n+#if defined(SWIG_COBJECT_TYPES)\n+ if (obj && PySwigObject_Check(obj)) {\n+ const char *otype = (const char *) PySwigObject_GetDesc(obj);\n+ if (otype) {\n+\tPyErr_Format(PyExc_TypeError, \"a '%s' is expected, 'PySwigObject(%s)' is received\",\n+\t\t type, otype);\n+\treturn;\n+ }\n+ } else \n+#endif \n+ {\n+ const char *otype = (obj ? obj->ob_type->tp_name : 0); \n+ if (otype) {\n+\tPyObject *str = PyObject_Str(obj);\n+\tconst char *cstr = str ? 
PyString_AsString(str) : 0;\n+\tif (cstr) {\n+\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s(%s)' is received\",\n+\t\t type, otype, cstr);\n+\t} else {\n+\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s' is received\",\n+\t\t type, otype);\n+\t}\n+\tPy_XDECREF(str);\n+\treturn;\n+ }\n+ } \n+ PyErr_Format(PyExc_TypeError, \"a '%s' is expected\", type);\n+ } else {\n+ PyErr_Format(PyExc_TypeError, \"unexpected type is received\");\n+ }\n+}\n+\n+SWIGRUNTIMEINLINE void\n+SWIG_Python_NullRef(const char *type)\n+{\n+ if (type) {\n+ PyErr_Format(PyExc_TypeError, \"null reference of type '%s' was received\",type);\n+ } else {\n+ PyErr_Format(PyExc_TypeError, \"null reference was received\");\n+ }\n+}\n+\n+SWIGRUNTIME int\n+SWIG_Python_AddErrMesg(const char* mesg, int infront)\n+{\n+ if (PyErr_Occurred()) {\n+ PyObject *type = 0;\n+ PyObject *value = 0;\n+ PyObject *traceback = 0;\n+ PyErr_Fetch(&type, &value, &traceback);\n+ if (value) {\n+ PyObject *old_str = PyObject_Str(value);\n+ Py_XINCREF(type);\n+ PyErr_Clear();\n+ if (infront) {\n+\tPyErr_Format(type, \"%s %s\", mesg, PyString_AsString(old_str));\n+ } else {\n+\tPyErr_Format(type, \"%s %s\", PyString_AsString(old_str), mesg);\n+ }\n+ Py_DECREF(old_str);\n+ }\n+ return 1;\n+ } else {\n+ return 0;\n+ }\n+}\n+\n+SWIGRUNTIME int\n+SWIG_Python_ArgFail(int argnum)\n+{\n+ if (PyErr_Occurred()) {\n+ /* add information about failing argument */\n+ char mesg[256];\n+ PyOS_snprintf(mesg, sizeof(mesg), \"argument number %d:\", argnum);\n+ return SWIG_Python_AddErrMesg(mesg, 1);\n+ } else {\n+ return 0;\n+ }\n+}\n+\n+\n+/* -----------------------------------------------------------------------------\n+ * pointers/data manipulation\n+ * ----------------------------------------------------------------------------- */\n+\n+/* Convert a pointer value */\n+SWIGRUNTIME int\n+SWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {\n+ swig_cast_info *tc;\n+ const char *c = 0;\n+ static 
PyObject *SWIG_this = 0;\n+ int newref = 0;\n+ PyObject *pyobj = 0;\n+ void *vptr;\n+ \n+ if (!obj) return 0;\n+ if (obj == Py_None) {\n+ *ptr = 0;\n+ return 0;\n+ }\n+\n+#ifdef SWIG_COBJECT_TYPES\n+ if (!(PySwigObject_Check(obj))) {\n+ if (!SWIG_this)\n+ SWIG_this = PyString_FromString(\"this\");\n+ pyobj = obj;\n+ obj = PyObject_GetAttr(obj,SWIG_this);\n+ newref = 1;\n+ if (!obj) goto type_error;\n+ if (!PySwigObject_Check(obj)) {\n+ Py_DECREF(obj);\n+ goto type_error;\n+ }\n+ } \n+ vptr = PySwigObject_AsVoidPtr(obj);\n+ c = (const char *) PySwigObject_GetDesc(obj);\n+ if (newref) { Py_DECREF(obj); }\n+ goto type_check;\n+#else\n+ if (!(PyString_Check(obj))) {\n+ if (!SWIG_this)\n+ SWIG_this = PyString_FromString(\"this\");\n+ pyobj = obj;\n+ obj = PyObject_GetAttr(obj,SWIG_this);\n+ newref = 1;\n+ if (!obj) goto type_error;\n+ if (!PyString_Check(obj)) {\n+ Py_DECREF(obj);\n+ goto type_error;\n+ }\n+ } \n+ c = PyString_AS_STRING(obj);\n+ /* Pointer values must start with leading underscore */\n+ c = SWIG_UnpackVoidPtr(c, &vptr, ty->name);\n+ if (newref) { Py_DECREF(obj); }\n+ if (!c) goto type_error;\n+#endif\n+\n+type_check:\n+ if (ty) {\n+ tc = SWIG_TypeCheck(c,ty);\n+ if (!tc) goto type_error;\n+ *ptr = SWIG_TypeCast(tc,vptr);\n+ } else {\n+ *ptr = vptr;\n+ }\n+ if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {\n+ PyObject_SetAttrString(pyobj,(char*)\"thisown\",Py_False);\n+ }\n+ return 0;\n+\n+type_error:\n+ PyErr_Clear();\n+ if (pyobj && !obj) { \n+ obj = pyobj;\n+ if (PyCFunction_Check(obj)) {\n+ /* here we get the method pointer for callbacks */\n+ char *doc = (((PyCFunctionObject *)obj) -> m_ml -> ml_doc);\n+ c = doc ? strstr(doc, \"swig_ptr: \") : 0;\n+ if (c) {\n+\tc = ty ? 
SWIG_UnpackVoidPtr(c + 10, &vptr, ty->name) : 0;\n+\tif (!c) goto type_error;\n+\tgoto type_check;\n+ }\n+ }\n+ }\n+ if (flags & SWIG_POINTER_EXCEPTION) {\n+ if (ty) {\n+ SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n+ } else {\n+ SWIG_Python_TypeError(\"C/C++ pointer\", obj);\n+ }\n+ }\n+ return -1;\n+}\n+\n+/* Convert a pointer value, signal an exception on a type mismatch */\n+SWIGRUNTIME void *\n+SWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {\n+ void *result;\n+ if (SWIG_Python_ConvertPtr(obj, &result, ty, flags) == -1) {\n+ PyErr_Clear();\n+ if (flags & SWIG_POINTER_EXCEPTION) {\n+ SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n+ SWIG_Python_ArgFail(argnum);\n+ }\n+ }\n+ return result;\n+}\n+\n+/* Convert a packed value value */\n+SWIGRUNTIME int\n+SWIG_Python_ConvertPacked(PyObject *obj, void *ptr, size_t sz, swig_type_info *ty, int flags) {\n+ swig_cast_info *tc;\n+ const char *c = 0;\n+\n+#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)\n+ c = PySwigPacked_UnpackData(obj, ptr, sz);\n+#else\n+ if ((!obj) || (!PyString_Check(obj))) goto type_error;\n+ c = PyString_AS_STRING(obj);\n+ /* Pointer values must start with leading underscore */\n+ c = SWIG_UnpackDataName(c, ptr, sz, ty->name);\n+#endif\n+ if (!c) goto type_error;\n+ if (ty) {\n+ tc = SWIG_TypeCheck(c,ty);\n+ if (!tc) goto type_error;\n+ }\n+ return 0;\n+\n+type_error:\n+ PyErr_Clear();\n+ if (flags & SWIG_POINTER_EXCEPTION) {\n+ if (ty) {\n+ SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n+ } else {\n+ SWIG_Python_TypeError(\"C/C++ packed data\", obj);\n+ }\n+ }\n+ return -1;\n+} \n+\n+/* Create a new array object */\n+SWIGRUNTIME PyObject *\n+SWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {\n+ PyObject *robj = 0;\n+ if (!type) {\n+ if (!PyErr_Occurred()) {\n+ PyErr_Format(PyExc_TypeError, \"Swig: null type passed to NewPointerObj\");\n+ }\n+ return robj;\n+ }\n+ if (!ptr) {\n+ Py_INCREF(Py_None);\n+ return 
Py_None;\n+ }\n+#ifdef SWIG_COBJECT_TYPES\n+ robj = PySwigObject_FromVoidPtrAndDesc((void *) ptr, (char *)type->name);\n+#else\n+ {\n+ char result[SWIG_BUFFER_SIZE];\n+ robj = SWIG_PackVoidPtr(result, ptr, type->name, sizeof(result)) ?\n+ PyString_FromString(result) : 0;\n+ }\n+#endif\n+ if (!robj || (robj == Py_None)) return robj;\n+ if (type->clientdata) {\n+ PyObject *inst;\n+ PyObject *args = Py_BuildValue((char*)\"(O)\", robj);\n+ Py_DECREF(robj);\n+ inst = PyObject_CallObject((PyObject *) type->clientdata, args);\n+ Py_DECREF(args);\n+ if (inst) {\n+ if (own) {\n+ PyObject_SetAttrString(inst,(char*)\"thisown\",Py_True);\n+ }\n+ robj = inst;\n+ }\n+ }\n+ return robj;\n+}\n+\n+SWIGRUNTIME PyObject *\n+SWIG_Python_NewPackedObj(void *ptr, size_t sz, swig_type_info *type) {\n+ PyObject *robj = 0;\n+ if (!ptr) {\n+ Py_INCREF(Py_None);\n+ return Py_None;\n+ }\n+#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)\n+ robj = PySwigPacked_FromDataAndDesc((void *) ptr, sz, (char *)type->name);\n+#else\n+ {\n+ char result[SWIG_BUFFER_SIZE];\n+ robj = SWIG_PackDataName(result, ptr, sz, type->name, sizeof(result)) ?\n+ PyString_FromString(result) : 0;\n+ }\n+#endif\n+ return robj;\n+}\n+\n+/* -----------------------------------------------------------------------------*\n+ * Get type list \n+ * -----------------------------------------------------------------------------*/\n+\n+#ifdef SWIG_LINK_RUNTIME\n+void *SWIG_ReturnGlobalTypeList(void *);\n+#endif\n+\n+SWIGRUNTIME swig_module_info *\n+SWIG_Python_GetModule(void) {\n+ static void *type_pointer = (void *)0;\n+ /* first check if module already created */\n+ if (!type_pointer) {\n+#ifdef SWIG_LINK_RUNTIME\n+ type_pointer = SWIG_ReturnGlobalTypeList((void *)0);\n+#else\n+ type_pointer = PyCObject_Import((char*)\"swig_runtime_data\" SWIG_RUNTIME_VERSION,\n+\t\t\t\t (char*)\"type_pointer\" SWIG_TYPE_TABLE_NAME);\n+ if (PyErr_Occurred()) {\n+ PyErr_Clear();\n+ type_pointer = (void *)0;\n+ }\n+ }\n+#endif\n+ 
return (swig_module_info *) type_pointer;\n+}\n+\n+SWIGRUNTIME void\n+SWIG_Python_SetModule(swig_module_info *swig_module) {\n+ static PyMethodDef swig_empty_runtime_method_table[] = { {NULL, NULL, 0, NULL} };/* Sentinel */\n+\n+ PyObject *module = Py_InitModule((char*)\"swig_runtime_data\" SWIG_RUNTIME_VERSION,\n+\t\t\t\t swig_empty_runtime_method_table);\n+ PyObject *pointer = PyCObject_FromVoidPtr((void *) swig_module, NULL);\n+ if (pointer && module) {\n+ PyModule_AddObject(module, (char*)\"type_pointer\" SWIG_TYPE_TABLE_NAME, pointer);\n+ }\n+}\n+\n+#ifdef __cplusplus\n+}\n+#endif\n+\n+/* -----------------------------------------------------------------------------*\n+ Standard SWIG API for use inside user code.\n+ \n+ Don't include this file directly, run the command\n+ swig -python -external-runtime\n+ Also, read the Modules chapter of the SWIG Manual.\n+ \n+ * -----------------------------------------------------------------------------*/\n+\n+#ifdef SWIG_MODULE_CLIENTDATA_TYPE\n+\n+SWIGRUNTIMEINLINE swig_type_info *\n+SWIG_TypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {\n+ swig_module_info *module = SWIG_GetModule(clientdata);\n+ return SWIG_TypeQueryModule(module, module, name);\n+}\n+\n+SWIGRUNTIMEINLINE swig_type_info *\n+SWIG_MangledTypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {\n+ swig_module_info *module = SWIG_GetModule(clientdata);\n+ return SWIG_MangledTypeQueryModule(module, module, name);\n+}\n+\n+#else\n+\n+SWIGRUNTIMEINLINE swig_type_info *\n+SWIG_TypeQuery(const char *name) {\n+ swig_module_info *module = SWIG_GetModule();\n+ return SWIG_TypeQueryModule(module, module, name);\n+}\n+\n+SWIGRUNTIMEINLINE swig_type_info *\n+SWIG_MangledTypeQuery(const char *name) {\n+ swig_module_info *module = SWIG_GetModule();\n+ return SWIG_MangledTypeQueryModule(module, module, name);\n+}\n+\n+#endif\n+\n+\n \"\"\"\n", "added_lines": 1344, "deleted_lines": 0, "source_code": "# This code allows one to use SWIG 
wrapped objects from weave. This\n# code is specific to SWIG-1.3 and above where things are different.\n# The code is basically all copied out from the SWIG wrapper code but\n# it has been hand edited for brevity.\n#\n# Prabhu Ramachandran \n\n######################################################################\n# This is for SWIG-1.3.x where x < 22.\n# Essentially, SWIG_RUNTIME_VERSION was not yet used.\nswigptr2_code_v0 = \"\"\"\n\n#include \"Python.h\"\n\n/*************************************************************** -*- c -*-\n * python/precommon.swg\n *\n * Rename all exported symbols from common.swg, to avoid symbol\n * clashes if multiple interpreters are included\n *\n ************************************************************************/\n\n#define SWIG_TypeCheck SWIG_Python_TypeCheck\n#define SWIG_TypeCast SWIG_Python_TypeCast\n#define SWIG_TypeName SWIG_Python_TypeName\n#define SWIG_TypeQuery SWIG_Python_TypeQuery\n#define SWIG_PackData SWIG_Python_PackData \n#define SWIG_UnpackData SWIG_Python_UnpackData \n\n\n/***********************************************************************\n * common.swg\n *\n * This file contains generic SWIG runtime support for pointer\n * type checking as well as a few commonly used macros to control\n * external linkage.\n *\n * Author : David Beazley (beazley@cs.uchicago.edu)\n *\n * Copyright (c) 1999-2000, The University of Chicago\n * \n * This file may be freely redistributed without license or fee provided\n * this copyright message remains intact.\n ************************************************************************/\n\n#include \n\n#if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)\n# if defined(_MSC_VER) || defined(__GNUC__)\n# if defined(STATIC_LINKED)\n# define SWIGEXPORT(a) a\n# define SWIGIMPORT(a) extern a\n# else\n# define SWIGEXPORT(a) __declspec(dllexport) a\n# define SWIGIMPORT(a) extern a\n# endif\n# else\n# if defined(__BORLANDC__)\n# define SWIGEXPORT(a) a _export\n# define 
SWIGIMPORT(a) a _export\n# else\n# define SWIGEXPORT(a) a\n# define SWIGIMPORT(a) a\n# endif\n# endif\n#else\n# define SWIGEXPORT(a) a\n# define SWIGIMPORT(a) a\n#endif\n\n#ifdef SWIG_GLOBAL\n# define SWIGRUNTIME(a) SWIGEXPORT(a)\n#else\n# define SWIGRUNTIME(a) static a\n#endif\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\ntypedef void *(*swig_converter_func)(void *);\ntypedef struct swig_type_info *(*swig_dycast_func)(void **);\n\ntypedef struct swig_type_info {\n const char *name;\n swig_converter_func converter;\n const char *str;\n void *clientdata;\n swig_dycast_func dcast;\n struct swig_type_info *next;\n struct swig_type_info *prev;\n} swig_type_info;\n\n#ifdef SWIG_NOINCLUDE\n\nSWIGIMPORT(swig_type_info *) SWIG_TypeCheck(char *c, swig_type_info *);\nSWIGIMPORT(void *) SWIG_TypeCast(swig_type_info *, void *);\nSWIGIMPORT(const char *) SWIG_TypeName(const swig_type_info *);\nSWIGIMPORT(swig_type_info *) SWIG_TypeQuery(const char *);\nSWIGIMPORT(char *) SWIG_PackData(char *, void *, int);\nSWIGIMPORT(char *) SWIG_UnpackData(char *, void *, int);\n\n#else\n\nstatic swig_type_info *swig_type_list = 0;\n\n/* Check the typename */\nSWIGRUNTIME(swig_type_info *) \nSWIG_TypeCheck(char *c, swig_type_info *ty) {\n swig_type_info *s;\n if (!ty) return 0; /* Void pointer */\n s = ty->next; /* First element always just a name */\n do {\n if (strcmp(s->name,c) == 0) {\n if (s == ty->next) return s;\n /* Move s to the top of the linked list */\n s->prev->next = s->next;\n if (s->next) {\n s->next->prev = s->prev;\n }\n /* Insert s as second element in the list */\n s->next = ty->next;\n if (ty->next) ty->next->prev = s;\n ty->next = s;\n s->prev = ty;\n return s;\n }\n s = s->next;\n } while (s && (s != ty->next));\n return 0;\n}\n\n/* Cast a pointer up an inheritance hierarchy */\nSWIGRUNTIME(void *) \nSWIG_TypeCast(swig_type_info *ty, void *ptr) {\n if ((!ty) || (!ty->converter)) return ptr;\n return (*ty->converter)(ptr);\n}\n\n/* Return the name associated with this 
type */\nSWIGRUNTIME(const char *)\nSWIG_TypeName(const swig_type_info *ty) {\n return ty->name;\n}\n\n/* \n Compare two type names skipping the space characters, therefore\n \"char*\" == \"char *\" and \"Class\" == \"Class\", etc.\n\n Return 0 when the two name types are equivalent, as in\n strncmp, but skipping ' '.\n*/\nstatic int\nSWIG_TypeNameComp(const char *f1, const char *l1,\n\t\t const char *f2, const char *l2) {\n for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {\n while ((*f1 == ' ') && (f1 != l1)) ++f1;\n while ((*f2 == ' ') && (f2 != l2)) ++f2;\n if (*f1 != *f2) return *f1 - *f2;\n }\n return (l1 - f1) - (l2 - f2);\n}\n\n/*\n Check type equivalence in a name list like ||...\n*/\nstatic int\nSWIG_TypeEquiv(const char *nb, const char *tb) {\n int equiv = 0;\n const char* te = tb + strlen(tb);\n const char* ne = nb;\n while (!equiv && *ne) {\n for (nb = ne; *ne; ++ne) {\n if (*ne == '|') break;\n }\n equiv = SWIG_TypeNameComp(nb, ne, tb, te) == 0;\n if (*ne) ++ne;\n }\n return equiv;\n}\n \n\n/* Search for a swig_type_info structure */\nSWIGRUNTIME(swig_type_info *)\nSWIG_TypeQuery(const char *name) {\n swig_type_info *ty = swig_type_list;\n while (ty) {\n if (ty->str && (SWIG_TypeEquiv(ty->str,name))) return ty;\n if (ty->name && (strcmp(name,ty->name) == 0)) return ty;\n ty = ty->prev;\n }\n return 0;\n}\n\n/* Pack binary data into a string */\nSWIGRUNTIME(char *)\nSWIG_PackData(char *c, void *ptr, int sz) {\n static char hex[17] = \"0123456789abcdef\";\n int i;\n unsigned char *u = (unsigned char *) ptr;\n register unsigned char uu;\n for (i = 0; i < sz; i++,u++) {\n uu = *u;\n *(c++) = hex[(uu & 0xf0) >> 4];\n *(c++) = hex[uu & 0xf];\n }\n return c;\n}\n\n/* Unpack binary data from a string */\nSWIGRUNTIME(char *)\nSWIG_UnpackData(char *c, void *ptr, int sz) {\n register unsigned char uu = 0;\n register int d;\n unsigned char *u = (unsigned char *) ptr;\n int i;\n for (i = 0; i < sz; i++, u++) {\n d = *(c++);\n if ((d >= '0') && (d <= '9'))\n uu = ((d - 
'0') << 4);\n else if ((d >= 'a') && (d <= 'f'))\n uu = ((d - ('a'-10)) << 4);\n d = *(c++);\n if ((d >= '0') && (d <= '9'))\n uu |= (d - '0');\n else if ((d >= 'a') && (d <= 'f'))\n uu |= (d - ('a'-10));\n *u = uu;\n }\n return c;\n}\n\n#endif\n\n#ifdef __cplusplus\n}\n#endif\n\n/***********************************************************************\n * python.swg\n *\n * This file contains the runtime support for Python modules\n * and includes code for managing global variables and pointer\n * type checking.\n *\n * Author : David Beazley (beazley@cs.uchicago.edu)\n ************************************************************************/\n\n#include \"Python.h\"\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n#define SWIG_PY_INT 1\n#define SWIG_PY_FLOAT 2\n#define SWIG_PY_STRING 3\n#define SWIG_PY_POINTER 4\n#define SWIG_PY_BINARY 5\n\n/* Flags for pointer conversion */\n\n#define SWIG_POINTER_EXCEPTION 0x1\n#define SWIG_POINTER_DISOWN 0x2\n\n/* Exception handling in wrappers */\n#define SWIG_fail goto fail\n\n/* Constant information structure */\ntypedef struct swig_const_info {\n int type;\n char *name;\n long lvalue;\n double dvalue;\n void *pvalue;\n swig_type_info **ptype;\n} swig_const_info;\n\n/* Common SWIG API */\n#define SWIG_ConvertPtr(obj, pp, type, flags) \\\n SWIG_Python_ConvertPtr(obj, pp, type, flags)\n#define SWIG_NewPointerObj(p, type, flags) \\\n SWIG_Python_NewPointerObj(p, type, flags)\n#define SWIG_MustGetPtr(p, type, argnum, flags) \\\n SWIG_Python_MustGetPtr(p, type, argnum, flags)\n \n\ntypedef double (*py_objasdbl_conv)(PyObject *obj);\n\n#ifdef SWIG_NOINCLUDE\n\nSWIGIMPORT(int) SWIG_Python_ConvertPtr(PyObject *, void **, swig_type_info *, int);\nSWIGIMPORT(PyObject *) SWIG_Python_NewPointerObj(void *, swig_type_info *,int own);\nSWIGIMPORT(void *) SWIG_Python_MustGetPtr(PyObject *, swig_type_info *, int, int);\n\n#else\n\n\n/* Convert a pointer value */\nSWIGRUNTIME(int)\nSWIG_Python_ConvertPtr(PyObject *obj, void **ptr, 
swig_type_info *ty, int flags) {\n swig_type_info *tc;\n char *c = 0;\n static PyObject *SWIG_this = 0;\n int newref = 0;\n PyObject *pyobj = 0;\n\n if (!obj) return 0;\n if (obj == Py_None) {\n *ptr = 0;\n return 0;\n }\n#ifdef SWIG_COBJECT_TYPES\n if (!(PyCObject_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PyCObject_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n *ptr = PyCObject_AsVoidPtr(obj);\n c = (char *) PyCObject_GetDesc(obj);\n if (newref) Py_DECREF(obj);\n goto cobject;\n#else\n if (!(PyString_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PyString_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n c = PyString_AsString(obj);\n /* Pointer values must start with leading underscore */\n if (*c != '_') {\n *ptr = (void *) 0;\n if (strcmp(c,\"NULL\") == 0) {\n if (newref) { Py_DECREF(obj); }\n return 0;\n } else {\n if (newref) { Py_DECREF(obj); }\n goto type_error;\n }\n }\n c++;\n c = SWIG_UnpackData(c,ptr,sizeof(void *));\n if (newref) { Py_DECREF(obj); }\n#endif\n\n#ifdef SWIG_COBJECT_TYPES\ncobject:\n#endif\n\n if (ty) {\n tc = SWIG_TypeCheck(c,ty);\n if (!tc) goto type_error;\n *ptr = SWIG_TypeCast(tc,(void*) *ptr);\n }\n\n if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {\n PyObject *zero = PyInt_FromLong(0);\n PyObject_SetAttrString(pyobj,(char*)\"thisown\",zero);\n Py_DECREF(zero);\n }\n return 0;\n\ntype_error:\n PyErr_Clear();\n if (flags & SWIG_POINTER_EXCEPTION) {\n if (ty && c) {\n PyErr_Format(PyExc_TypeError, \n\t\t \"Type error. 
Got %s, expected %s\",\n\t\t c, ty->name);\n } else {\n PyErr_SetString(PyExc_TypeError,\"Expected a pointer\");\n }\n }\n return -1;\n}\n\n/* Convert a pointer value, signal an exception on a type mismatch */\nSWIGRUNTIME(void *)\nSWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {\n void *result;\n SWIG_Python_ConvertPtr(obj, &result, ty, flags | SWIG_POINTER_EXCEPTION);\n return result;\n}\n\n/* Create a new pointer object */\nSWIGRUNTIME(PyObject *)\nSWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {\n PyObject *robj;\n if (!ptr) {\n Py_INCREF(Py_None);\n return Py_None;\n }\n#ifdef SWIG_COBJECT_TYPES\n robj = PyCObject_FromVoidPtrAndDesc((void *) ptr, (char *) type->name, NULL);\n#else\n {\n char result[1024];\n char *r = result;\n *(r++) = '_';\n r = SWIG_PackData(r,&ptr,sizeof(void *));\n strcpy(r,type->name);\n robj = PyString_FromString(result);\n }\n#endif\n if (!robj || (robj == Py_None)) return robj;\n if (type->clientdata) {\n PyObject *inst;\n PyObject *args = Py_BuildValue((char*)\"(O)\", robj);\n Py_DECREF(robj);\n inst = PyObject_CallObject((PyObject *) type->clientdata, args);\n Py_DECREF(args);\n if (inst) {\n if (own) {\n PyObject *n = PyInt_FromLong(1);\n PyObject_SetAttrString(inst,(char*)\"thisown\",n);\n Py_DECREF(n);\n }\n robj = inst;\n }\n }\n return robj;\n}\n\n#endif\n\n#ifdef __cplusplus\n}\n#endif\n\n\"\"\"\n\n\n######################################################################\n# This is for SWIG-1.3.x where x >= 23.\n# SWIG_RUNTIME_VERSION == \"1\"\n\n# All this does is to include (cut/paste): \n# and \nswigptr2_code_v1 = \"\"\"\n/***********************************************************************\n * swigrun.swg\n *\n * This file contains generic CAPI SWIG runtime support for pointer\n * type checking.\n *\n ************************************************************************/\n\n/* This should only be incremented when either the layout of swig_type_info changes,\n or 
for whatever reason, the runtime changes incompatibly */\n#define SWIG_RUNTIME_VERSION \"1\"\n\n/* define SWIG_TYPE_TABLE_NAME as \"SWIG_TYPE_TABLE\" */\n#ifdef SWIG_TYPE_TABLE\n#define SWIG_QUOTE_STRING(x) #x\n#define SWIG_EXPAND_AND_QUOTE_STRING(x) SWIG_QUOTE_STRING(x)\n#define SWIG_TYPE_TABLE_NAME SWIG_EXPAND_AND_QUOTE_STRING(SWIG_TYPE_TABLE)\n#else\n#define SWIG_TYPE_TABLE_NAME\n#endif\n\n#include \n\n#ifndef SWIGINLINE\n#if defined(__cplusplus) || (defined(__GNUC__) && !defined(__STRICT_ANSI__))\n# define SWIGINLINE inline\n#else\n# define SWIGINLINE\n#endif\n#endif\n\n/*\n You can use the SWIGRUNTIME and SWIGRUNTIMEINLINE macros for\n creating a static or dynamic library from the swig runtime code.\n In 99.9% of the cases, swig just needs to declare them as 'static'.\n \n But only do this if is strictly necessary, ie, if you have problems\n with your compiler or so.\n*/\n#ifndef SWIGRUNTIME\n#define SWIGRUNTIME static\n#endif\n#ifndef SWIGRUNTIMEINLINE\n#define SWIGRUNTIMEINLINE SWIGRUNTIME SWIGINLINE\n#endif\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\ntypedef void *(*swig_converter_func)(void *);\ntypedef struct swig_type_info *(*swig_dycast_func)(void **);\n\ntypedef struct swig_type_info {\n const char *name;\n swig_converter_func converter;\n const char *str;\n void *clientdata;\n swig_dycast_func dcast;\n struct swig_type_info *next;\n struct swig_type_info *prev;\n} swig_type_info;\n\n/* \n Compare two type names skipping the space characters, therefore\n \"char*\" == \"char *\" and \"Class\" == \"Class\", etc.\n\n Return 0 when the two name types are equivalent, as in\n strncmp, but skipping ' '.\n*/\nSWIGRUNTIME int\nSWIG_TypeNameComp(const char *f1, const char *l1,\n\t\t const char *f2, const char *l2) {\n for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {\n while ((*f1 == ' ') && (f1 != l1)) ++f1;\n while ((*f2 == ' ') && (f2 != l2)) ++f2;\n if (*f1 != *f2) return *f1 - *f2;\n }\n return (l1 - f1) - (l2 - f2);\n}\n\n/*\n Check type equivalence in a 
name list like ||...\n*/\nSWIGRUNTIME int\nSWIG_TypeEquiv(const char *nb, const char *tb) {\n int equiv = 0;\n const char* te = tb + strlen(tb);\n const char* ne = nb;\n while (!equiv && *ne) {\n for (nb = ne; *ne; ++ne) {\n if (*ne == '|') break;\n }\n equiv = SWIG_TypeNameComp(nb, ne, tb, te) == 0;\n if (*ne) ++ne;\n }\n return equiv;\n}\n\n/*\n Register a type mapping with the type-checking\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeRegisterTL(swig_type_info **tl, swig_type_info *ti) {\n swig_type_info *tc, *head, *ret, *next;\n /* Check to see if this type has already been registered */\n tc = *tl;\n while (tc) {\n /* check simple type equivalence */\n int typeequiv = (strcmp(tc->name, ti->name) == 0); \n /* check full type equivalence, resolving typedefs */\n if (!typeequiv) {\n /* only if tc is not a typedef (no '|' on it) */\n if (tc->str && ti->str && !strstr(tc->str,\"|\")) {\n\ttypeequiv = SWIG_TypeEquiv(ti->str,tc->str);\n }\n }\n if (typeequiv) {\n /* Already exists in the table. 
Just add additional types to the list */\n if (ti->clientdata) tc->clientdata = ti->clientdata;\n head = tc;\n next = tc->next;\n goto l1;\n }\n tc = tc->prev;\n }\n head = ti;\n next = 0;\n\n /* Place in list */\n ti->prev = *tl;\n *tl = ti;\n\n /* Build linked lists */\n l1:\n ret = head;\n tc = ti + 1;\n /* Patch up the rest of the links */\n while (tc->name) {\n head->next = tc;\n tc->prev = head;\n head = tc;\n tc++;\n }\n if (next) next->prev = head;\n head->next = next;\n\n return ret;\n}\n\n/*\n Check the typename\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeCheck(const char *c, swig_type_info *ty) {\n swig_type_info *s;\n if (!ty) return 0; /* Void pointer */\n s = ty->next; /* First element always just a name */\n do {\n if (strcmp(s->name,c) == 0) {\n if (s == ty->next) return s;\n /* Move s to the top of the linked list */\n s->prev->next = s->next;\n if (s->next) {\n s->next->prev = s->prev;\n }\n /* Insert s as second element in the list */\n s->next = ty->next;\n if (ty->next) ty->next->prev = s;\n ty->next = s;\n s->prev = ty;\n return s;\n }\n s = s->next;\n } while (s && (s != ty->next));\n return 0;\n}\n\n/*\n Cast a pointer up an inheritance hierarchy\n*/\nSWIGRUNTIMEINLINE void *\nSWIG_TypeCast(swig_type_info *ty, void *ptr) {\n return ((!ty) || (!ty->converter)) ? ptr : (*ty->converter)(ptr);\n}\n\n/* \n Dynamic pointer casting. 
Down an inheritance hierarchy\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeDynamicCast(swig_type_info *ty, void **ptr) {\n swig_type_info *lastty = ty;\n if (!ty || !ty->dcast) return ty;\n while (ty && (ty->dcast)) {\n ty = (*ty->dcast)(ptr);\n if (ty) lastty = ty;\n }\n return lastty;\n}\n\n/*\n Return the name associated with this type\n*/\nSWIGRUNTIMEINLINE const char *\nSWIG_TypeName(const swig_type_info *ty) {\n return ty->name;\n}\n\n/*\n Return the pretty name associated with this type,\n that is an unmangled type name in a form presentable to the user.\n*/\nSWIGRUNTIME const char *\nSWIG_TypePrettyName(const swig_type_info *type) {\n /* The \"str\" field contains the equivalent pretty names of the\n type, separated by vertical-bar characters. We choose\n to print the last name, as it is often (?) the most\n specific. */\n if (type->str != NULL) {\n const char *last_name = type->str;\n const char *s;\n for (s = type->str; *s; s++)\n if (*s == '|') last_name = s+1;\n return last_name;\n }\n else\n return type->name;\n}\n\n/*\n Search for a swig_type_info structure\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeQueryTL(swig_type_info *tl, const char *name) {\n swig_type_info *ty = tl;\n while (ty) {\n if (ty->str && (SWIG_TypeEquiv(ty->str,name))) return ty;\n if (ty->name && (strcmp(name,ty->name) == 0)) return ty;\n ty = ty->prev;\n }\n return 0;\n}\n\n/* \n Set the clientdata field for a type\n*/\nSWIGRUNTIME void\nSWIG_TypeClientDataTL(swig_type_info *tl, swig_type_info *ti, void *clientdata) {\n swig_type_info *tc, *equiv;\n if (ti->clientdata) return;\n /* if (ti->clientdata == clientdata) return; */\n ti->clientdata = clientdata;\n equiv = ti->next;\n while (equiv) {\n if (!equiv->converter) {\n tc = tl;\n while (tc) {\n if ((strcmp(tc->name, equiv->name) == 0))\n SWIG_TypeClientDataTL(tl,tc,clientdata);\n tc = tc->prev;\n }\n }\n equiv = equiv->next;\n }\n}\n\n/* \n Pack binary data into a string\n*/\nSWIGRUNTIME char *\nSWIG_PackData(char *c, void *ptr, 
size_t sz) {\n static char hex[17] = \"0123456789abcdef\";\n unsigned char *u = (unsigned char *) ptr;\n const unsigned char *eu = u + sz;\n register unsigned char uu;\n for (; u != eu; ++u) {\n uu = *u;\n *(c++) = hex[(uu & 0xf0) >> 4];\n *(c++) = hex[uu & 0xf];\n }\n return c;\n}\n\n/* \n Unpack binary data from a string\n*/\nSWIGRUNTIME const char *\nSWIG_UnpackData(const char *c, void *ptr, size_t sz) {\n register unsigned char *u = (unsigned char *) ptr;\n register const unsigned char *eu = u + sz;\n for (; u != eu; ++u) {\n register int d = *(c++);\n register unsigned char uu = 0;\n if ((d >= '0') && (d <= '9'))\n uu = ((d - '0') << 4);\n else if ((d >= 'a') && (d <= 'f'))\n uu = ((d - ('a'-10)) << 4);\n else \n return (char *) 0;\n d = *(c++);\n if ((d >= '0') && (d <= '9'))\n uu |= (d - '0');\n else if ((d >= 'a') && (d <= 'f'))\n uu |= (d - ('a'-10));\n else \n return (char *) 0;\n *u = uu;\n }\n return c;\n}\n\n/*\n This function will propagate the clientdata field of type to any new\n swig_type_info structures that have been added into the list of\n equivalent types. 
It is like calling SWIG_TypeClientData(type,\n clientdata) a second time.\n*/\nSWIGRUNTIME void\nSWIG_PropagateClientDataTL(swig_type_info *tl, swig_type_info *type) {\n swig_type_info *equiv = type->next;\n swig_type_info *tc;\n if (!type->clientdata) return;\n while (equiv) {\n if (!equiv->converter) {\n tc = tl;\n while (tc) {\n if ((strcmp(tc->name, equiv->name) == 0) && !tc->clientdata)\n SWIG_TypeClientDataTL(tl,tc, type->clientdata);\n tc = tc->prev;\n }\n }\n equiv = equiv->next;\n }\n}\n\n/* \n Pack 'void *' into a string buffer.\n*/\nSWIGRUNTIME char *\nSWIG_PackVoidPtr(char *buff, void *ptr, const char *name, size_t bsz) {\n char *r = buff;\n if ((2*sizeof(void *) + 2) > bsz) return 0;\n *(r++) = '_';\n r = SWIG_PackData(r,&ptr,sizeof(void *));\n if (strlen(name) + 1 > (bsz - (r - buff))) return 0;\n strcpy(r,name);\n return buff;\n}\n\nSWIGRUNTIME const char *\nSWIG_UnpackVoidPtr(const char *c, void **ptr, const char *name) {\n if (*c != '_') {\n if (strcmp(c,\"NULL\") == 0) {\n *ptr = (void *) 0;\n return name;\n } else {\n return 0;\n }\n }\n return SWIG_UnpackData(++c,ptr,sizeof(void *));\n}\n\nSWIGRUNTIME char *\nSWIG_PackDataName(char *buff, void *ptr, size_t sz, const char *name, size_t bsz) {\n char *r = buff;\n size_t lname = (name ? 
strlen(name) : 0);\n if ((2*sz + 2 + lname) > bsz) return 0;\n *(r++) = '_';\n r = SWIG_PackData(r,ptr,sz);\n if (lname) {\n strncpy(r,name,lname+1);\n } else {\n *r = 0;\n }\n return buff;\n}\n\nSWIGRUNTIME const char *\nSWIG_UnpackDataName(const char *c, void *ptr, size_t sz, const char *name) {\n if (*c != '_') {\n if (strcmp(c,\"NULL\") == 0) {\n memset(ptr,0,sz);\n return name;\n } else {\n return 0;\n }\n }\n return SWIG_UnpackData(++c,ptr,sz);\n}\n\n#ifdef __cplusplus\n}\n#endif\n\n/***********************************************************************\n * pyrun.swg\n *\n * This file contains the runtime support for Python modules\n * and includes code for managing global variables and pointer\n * type checking.\n *\n * Author : David Beazley (beazley@cs.uchicago.edu)\n ************************************************************************/\n\n/* Common SWIG API */\n#define SWIG_ConvertPtr(obj, pp, type, flags) SWIG_Python_ConvertPtr(obj, pp, type, flags)\n#define SWIG_NewPointerObj(p, type, flags) SWIG_Python_NewPointerObj(p, type, flags)\n#define SWIG_MustGetPtr(p, type, argnum, flags) SWIG_Python_MustGetPtr(p, type, argnum, flags)\n \n\n/* Python-specific SWIG API */\n#define SWIG_ConvertPacked(obj, ptr, sz, ty, flags) SWIG_Python_ConvertPacked(obj, ptr, sz, ty, flags)\n#define SWIG_NewPackedObj(ptr, sz, type) SWIG_Python_NewPackedObj(ptr, sz, type)\n\n\n/* -----------------------------------------------------------------------------\n * Pointer declarations\n * ----------------------------------------------------------------------------- */\n/*\n Use SWIG_NO_COBJECT_TYPES to force the use of strings to represent\n C/C++ pointers in the python side. 
Very useful for debugging, but\n not always safe.\n*/\n#if !defined(SWIG_NO_COBJECT_TYPES) && !defined(SWIG_COBJECT_TYPES)\n# define SWIG_COBJECT_TYPES\n#endif\n\n/* Flags for pointer conversion */\n#define SWIG_POINTER_EXCEPTION 0x1\n#define SWIG_POINTER_DISOWN 0x2\n\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n/* -----------------------------------------------------------------------------\n * Create a new pointer string \n * ----------------------------------------------------------------------------- */\n\n#ifndef SWIG_BUFFER_SIZE\n#define SWIG_BUFFER_SIZE 1024\n#endif\n\n#if defined(SWIG_COBJECT_TYPES)\n#if !defined(SWIG_COBJECT_PYTHON)\n/* -----------------------------------------------------------------------------\n * Implements a simple Swig Object type, and use it instead of PyCObject\n * ----------------------------------------------------------------------------- */\n\ntypedef struct {\n PyObject_HEAD\n void *ptr;\n const char *desc;\n} PySwigObject;\n\n/* Declarations for objects of type PySwigObject */\n\nSWIGRUNTIME int\nPySwigObject_print(PySwigObject *v, FILE *fp, int flags)\n{\n char result[SWIG_BUFFER_SIZE];\n if (SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result))) {\n fputs(\"\", fp);\n return 0; \n } else {\n return 1; \n }\n}\n \nSWIGRUNTIME PyObject *\nPySwigObject_repr(PySwigObject *v)\n{\n char result[SWIG_BUFFER_SIZE];\n return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?\n PyString_FromFormat(\"\", result) : 0;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_str(PySwigObject *v)\n{\n char result[SWIG_BUFFER_SIZE];\n return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?\n PyString_FromString(result) : 0;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_long(PySwigObject *v)\n{\n return PyLong_FromUnsignedLong((unsigned long) v->ptr);\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_oct(PySwigObject *v)\n{\n char buf[100];\n unsigned long x = (unsigned long)v->ptr;\n if (x == 0)\n strcpy(buf, \"0\");\n else\n 
PyOS_snprintf(buf, sizeof(buf), \"0%lo\", x);\n return PyString_FromString(buf);\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_hex(PySwigObject *v)\n{\n char buf[100];\n PyOS_snprintf(buf, sizeof(buf), \"0x%lx\", (unsigned long)v->ptr);\n return PyString_FromString(buf);\n}\n\nSWIGRUNTIME int\nPySwigObject_compare(PySwigObject *v, PySwigObject *w)\n{\n int c = strcmp(v->desc, w->desc);\n if (c) {\n return c;\n } else {\n void *i = v->ptr;\n void *j = w->ptr;\n return (i < j) ? -1 : (i > j) ? 1 : 0;\n }\n}\n\nSWIGRUNTIME void\nPySwigObject_dealloc(PySwigObject *self)\n{\n PyObject_DEL(self);\n}\n\nSWIGRUNTIME PyTypeObject*\nPySwigObject_GetType() {\n static char PySwigObject_Type__doc__[] = \n \"Swig object carries a C/C++ instance pointer\";\n \n static PyNumberMethods PySwigObject_as_number = {\n (binaryfunc)0, /*nb_add*/\n (binaryfunc)0, /*nb_subtract*/\n (binaryfunc)0, /*nb_multiply*/\n (binaryfunc)0, /*nb_divide*/\n (binaryfunc)0, /*nb_remainder*/\n (binaryfunc)0, /*nb_divmod*/\n (ternaryfunc)0,/*nb_power*/\n (unaryfunc)0, /*nb_negative*/\n (unaryfunc)0, /*nb_positive*/\n (unaryfunc)0, /*nb_absolute*/\n (inquiry)0, /*nb_nonzero*/\n 0,\t\t /*nb_invert*/\n 0,\t\t /*nb_lshift*/\n 0,\t\t /*nb_rshift*/\n 0,\t\t /*nb_and*/\n 0,\t\t /*nb_xor*/\n 0,\t\t /*nb_or*/\n (coercion)0, /*nb_coerce*/\n (unaryfunc)PySwigObject_long, /*nb_int*/\n (unaryfunc)PySwigObject_long, /*nb_long*/\n (unaryfunc)0, /*nb_float*/\n (unaryfunc)PySwigObject_oct, /*nb_oct*/\n (unaryfunc)PySwigObject_hex, /*nb_hex*/\n#if PY_VERSION_HEX >= 0x02000000\n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_true_divide */ \n#endif\n };\n\n static PyTypeObject PySwigObject_Type = {\n PyObject_HEAD_INIT(&PyType_Type)\n 0,\t\t\t\t\t/*ob_size*/\n \"PySwigObject\",\t\t\t/*tp_name*/\n sizeof(PySwigObject),\t\t/*tp_basicsize*/\n 0,\t\t\t\t\t/*tp_itemsize*/\n /* methods */\n (destructor)PySwigObject_dealloc,\t/*tp_dealloc*/\n (printfunc)PySwigObject_print,\t/*tp_print*/\n 
(getattrfunc)0,\t\t\t/*tp_getattr*/\n (setattrfunc)0,\t\t\t/*tp_setattr*/\n (cmpfunc)PySwigObject_compare,\t/*tp_compare*/\n (reprfunc)PySwigObject_repr,\t/*tp_repr*/\n &PySwigObject_as_number,\t /*tp_as_number*/\n 0,\t\t\t\t\t/*tp_as_sequence*/\n 0,\t\t\t\t\t/*tp_as_mapping*/\n (hashfunc)0,\t\t\t/*tp_hash*/\n (ternaryfunc)0,\t\t\t/*tp_call*/\n (reprfunc)PySwigObject_str,\t\t/*tp_str*/\n /* Space for future expansion */\n 0L,0L,0L,0L,\n PySwigObject_Type__doc__, \t /* Documentation string */\n#if PY_VERSION_HEX >= 0x02000000\n 0, /* tp_traverse */\n 0, /* tp_clear */\n#endif\n#if PY_VERSION_HEX >= 0x02010000\n 0, /* tp_richcompare */\n 0, /* tp_weaklistoffset */\n#endif\n#if PY_VERSION_HEX >= 0x02020000\n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */\n#endif\n#if PY_VERSION_HEX >= 0x02030000\n 0, /* tp_del */\n#endif\n#ifdef COUNT_ALLOCS\n 0,0,0,0 /* tp_alloc -> tp_next */\n#endif\n };\n\n return &PySwigObject_Type;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_FromVoidPtrAndDesc(void *ptr, const char *desc)\n{\n PySwigObject *self = PyObject_NEW(PySwigObject, PySwigObject_GetType());\n if (self == NULL) return NULL;\n self->ptr = ptr;\n self->desc = desc;\n return (PyObject *)self;\n}\n\nSWIGRUNTIMEINLINE void *\nPySwigObject_AsVoidPtr(PyObject *self)\n{\n return ((PySwigObject *)self)->ptr;\n}\n\nSWIGRUNTIMEINLINE const char *\nPySwigObject_GetDesc(PyObject *self)\n{\n return ((PySwigObject *)self)->desc;\n}\n\nSWIGRUNTIMEINLINE int\nPySwigObject_Check(PyObject *op) {\n return ((op)->ob_type == PySwigObject_GetType()) \n || (strcmp((op)->ob_type->tp_name,\"PySwigObject\") == 0);\n}\n\n/* -----------------------------------------------------------------------------\n * Implements a simple Swig Packed type, and use it instead of string\n * ----------------------------------------------------------------------------- */\n\ntypedef struct {\n PyObject_HEAD\n void *pack;\n const char *desc;\n size_t size;\n} PySwigPacked;\n\nSWIGRUNTIME 
int\nPySwigPacked_print(PySwigPacked *v, FILE *fp, int flags)\n{\n char result[SWIG_BUFFER_SIZE];\n fputs(\"pack, v->size, 0, sizeof(result))) {\n fputs(\"at \", fp); \n fputs(result, fp); \n }\n fputs(v->desc,fp); \n fputs(\">\", fp);\n return 0; \n}\n \nSWIGRUNTIME PyObject *\nPySwigPacked_repr(PySwigPacked *v)\n{\n char result[SWIG_BUFFER_SIZE];\n if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {\n return PyString_FromFormat(\"\", result, v->desc);\n } else {\n return PyString_FromFormat(\"\", v->desc);\n } \n}\n\nSWIGRUNTIME PyObject *\nPySwigPacked_str(PySwigPacked *v)\n{\n char result[SWIG_BUFFER_SIZE];\n if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))){\n return PyString_FromFormat(\"%s%s\", result, v->desc);\n } else {\n return PyString_FromFormat(\"%s\", v->desc);\n } \n}\n\nSWIGRUNTIME int\nPySwigPacked_compare(PySwigPacked *v, PySwigPacked *w)\n{\n int c = strcmp(v->desc, w->desc);\n if (c) {\n return c;\n } else {\n size_t i = v->size;\n size_t j = w->size;\n int s = (i < j) ? -1 : (i > j) ? 1 : 0;\n return s ? 
s : strncmp((char *)v->pack, (char *)w->pack, 2*v->size);\n }\n}\n\nSWIGRUNTIME void\nPySwigPacked_dealloc(PySwigPacked *self)\n{\n free(self->pack);\n PyObject_DEL(self);\n}\n\nSWIGRUNTIME PyTypeObject*\nPySwigPacked_GetType() {\n static char PySwigPacked_Type__doc__[] = \n \"Swig object carries a C/C++ instance pointer\";\n \n static PyTypeObject PySwigPacked_Type = {\n PyObject_HEAD_INIT(&PyType_Type)\n 0,\t\t\t\t\t/*ob_size*/\n \"PySwigPacked\",\t\t\t/*tp_name*/\n sizeof(PySwigPacked),\t\t/*tp_basicsize*/\n 0,\t\t\t\t\t/*tp_itemsize*/\n /* methods */\n (destructor)PySwigPacked_dealloc,\t/*tp_dealloc*/\n (printfunc)PySwigPacked_print,\t/*tp_print*/\n (getattrfunc)0,\t\t\t/*tp_getattr*/\n (setattrfunc)0,\t\t\t/*tp_setattr*/\n (cmpfunc)PySwigPacked_compare,\t/*tp_compare*/\n (reprfunc)PySwigPacked_repr,\t/*tp_repr*/\n 0,\t /*tp_as_number*/\n 0,\t\t\t\t\t/*tp_as_sequence*/\n 0,\t\t\t\t\t/*tp_as_mapping*/\n (hashfunc)0,\t\t\t/*tp_hash*/\n (ternaryfunc)0,\t\t\t/*tp_call*/\n (reprfunc)PySwigPacked_str,\t\t/*tp_str*/\n /* Space for future expansion */\n 0L,0L,0L,0L,\n PySwigPacked_Type__doc__, \t /* Documentation string */\n#if PY_VERSION_HEX >= 0x02000000\n 0, /* tp_traverse */\n 0, /* tp_clear */\n#endif\n#if PY_VERSION_HEX >= 0x02010000\n 0, /* tp_richcompare */\n 0, /* tp_weaklistoffset */\n#endif\n#if PY_VERSION_HEX >= 0x02020000 \n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */\n#endif\n#if PY_VERSION_HEX >= 0x02030000\n 0, /* tp_del */\n#endif\n#ifdef COUNT_ALLOCS\n 0,0,0,0 /* tp_alloc -> tp_next */\n#endif\n };\n\n return &PySwigPacked_Type;\n}\n\nSWIGRUNTIME PyObject *\nPySwigPacked_FromDataAndDesc(void *ptr, size_t size, const char *desc)\n{\n PySwigPacked *self = PyObject_NEW(PySwigPacked, PySwigPacked_GetType());\n if (self == NULL) {\n return NULL;\n } else {\n void *pack = malloc(size);\n memcpy(pack, ptr, size);\n self->pack = pack;\n self->desc = desc;\n self->size = size;\n return (PyObject *) self;\n }\n}\n\nSWIGRUNTIMEINLINE 
const char *\nPySwigPacked_UnpackData(PyObject *obj, void *ptr, size_t size)\n{\n PySwigPacked *self = (PySwigPacked *)obj;\n if (self->size != size) return 0;\n memcpy(ptr, self->pack, size);\n return self->desc;\n}\n\nSWIGRUNTIMEINLINE const char *\nPySwigPacked_GetDesc(PyObject *self)\n{\n return ((PySwigPacked *)self)->desc;\n}\n\nSWIGRUNTIMEINLINE int\nPySwigPacked_Check(PyObject *op) {\n return ((op)->ob_type == PySwigPacked_GetType()) \n || (strcmp((op)->ob_type->tp_name,\"PySwigPacked\") == 0);\n}\n\n#else\n/* -----------------------------------------------------------------------------\n * Use the old Python PyCObject instead of PySwigObject\n * ----------------------------------------------------------------------------- */\n\n#define PySwigObject_GetDesc(obj)\t PyCObject_GetDesc(obj)\n#define PySwigObject_Check(obj)\t PyCObject_Check(obj)\n#define PySwigObject_AsVoidPtr(obj)\t PyCObject_AsVoidPtr(obj)\n#define PySwigObject_FromVoidPtrAndDesc(p, d) PyCObject_FromVoidPtrAndDesc(p, d, NULL)\n\n#endif\n\n#endif\n\n/* -----------------------------------------------------------------------------\n * errors manipulation\n * ----------------------------------------------------------------------------- */\n\nSWIGRUNTIME void\nSWIG_Python_TypeError(const char *type, PyObject *obj)\n{\n if (type) {\n#if defined(SWIG_COBJECT_TYPES)\n if (PySwigObject_Check(obj)) {\n const char *otype = (const char *) PySwigObject_GetDesc(obj);\n if (otype) {\n\tPyErr_Format(PyExc_TypeError, \"a '%s' is expected, 'PySwigObject(%s)' is received\",\n\t\t type, otype);\n\treturn;\n }\n } else \n#endif \n {\n const char *otype = (obj ? obj->ob_type->tp_name : 0); \n if (otype) {\n\tPyObject *str = PyObject_Str(obj);\n\tconst char *cstr = str ? 
PyString_AsString(str) : 0;\n\tif (cstr) {\n\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s(%s)' is received\",\n\t\t type, otype, cstr);\n\t} else {\n\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s' is received\",\n\t\t type, otype);\n\t}\n\tPy_DECREF(str);\n\treturn;\n }\n } \n PyErr_Format(PyExc_TypeError, \"a '%s' is expected\", type);\n } else {\n PyErr_Format(PyExc_TypeError, \"unexpected type is received\");\n }\n}\n\nSWIGRUNTIMEINLINE void\nSWIG_Python_NullRef(const char *type)\n{\n if (type) {\n PyErr_Format(PyExc_TypeError, \"null reference of type '%s' was received\",type);\n } else {\n PyErr_Format(PyExc_TypeError, \"null reference was received\");\n }\n}\n\nSWIGRUNTIME int\nSWIG_Python_AddErrMesg(const char* mesg, int infront)\n{\n if (PyErr_Occurred()) {\n PyObject *type = 0;\n PyObject *value = 0;\n PyObject *traceback = 0;\n PyErr_Fetch(&type, &value, &traceback);\n if (value) {\n PyObject *old_str = PyObject_Str(value);\n Py_XINCREF(type);\n PyErr_Clear();\n if (infront) {\n\tPyErr_Format(type, \"%s %s\", mesg, PyString_AsString(old_str));\n } else {\n\tPyErr_Format(type, \"%s %s\", PyString_AsString(old_str), mesg);\n }\n Py_DECREF(old_str);\n }\n return 1;\n } else {\n return 0;\n }\n}\n\nSWIGRUNTIME int\nSWIG_Python_ArgFail(int argnum)\n{\n if (PyErr_Occurred()) {\n /* add information about failing argument */\n char mesg[256];\n sprintf(mesg, \"argument number %d:\", argnum);\n return SWIG_Python_AddErrMesg(mesg, 1);\n } else {\n return 0;\n }\n}\n\n\n/* -----------------------------------------------------------------------------\n * pointers/data manipulation\n * ----------------------------------------------------------------------------- */\n\n/* Convert a pointer value */\nSWIGRUNTIME int\nSWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {\n swig_type_info *tc;\n const char *c = 0;\n static PyObject *SWIG_this = 0;\n int newref = 0;\n PyObject *pyobj = 0;\n void *vptr;\n \n if (!obj) 
return 0;\n if (obj == Py_None) {\n *ptr = 0;\n return 0;\n }\n\n#ifdef SWIG_COBJECT_TYPES\n if (!(PySwigObject_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PySwigObject_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n vptr = PySwigObject_AsVoidPtr(obj);\n c = (const char *) PySwigObject_GetDesc(obj);\n if (newref) { Py_DECREF(obj); }\n goto type_check;\n#else\n if (!(PyString_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PyString_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n c = PyString_AS_STRING(obj);\n /* Pointer values must start with leading underscore */\n c = SWIG_UnpackVoidPtr(c, &vptr, ty->name);\n if (newref) { Py_DECREF(obj); }\n if (!c) goto type_error;\n#endif\n\ntype_check:\n\n if (ty) {\n tc = SWIG_TypeCheck(c,ty);\n if (!tc) goto type_error;\n *ptr = SWIG_TypeCast(tc,vptr);\n }\n\n if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {\n PyObject_SetAttrString(pyobj,(char*)\"thisown\",Py_False);\n }\n return 0;\n\ntype_error:\n PyErr_Clear();\n if (pyobj && !obj) { \n obj = pyobj;\n if (PyCFunction_Check(obj)) {\n /* here we get the method pointer for callbacks */\n char *doc = (((PyCFunctionObject *)obj) -> m_ml -> ml_doc);\n c = doc ? 
strstr(doc, \"swig_ptr: \") : 0;\n if (c) {\n\tc = SWIG_UnpackVoidPtr(c + 10, &vptr, ty->name);\n\tif (!c) goto type_error;\n\tgoto type_check;\n }\n }\n }\n if (flags & SWIG_POINTER_EXCEPTION) {\n if (ty) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n } else {\n SWIG_Python_TypeError(\"C/C++ pointer\", obj);\n }\n }\n return -1;\n}\n\n/* Convert a pointer value, signal an exception on a type mismatch */\nSWIGRUNTIME void *\nSWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {\n void *result;\n if (SWIG_Python_ConvertPtr(obj, &result, ty, flags) == -1) {\n PyErr_Clear();\n if (flags & SWIG_POINTER_EXCEPTION) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n SWIG_Python_ArgFail(argnum);\n }\n }\n return result;\n}\n\n/* Convert a packed value value */\nSWIGRUNTIME int\nSWIG_Python_ConvertPacked(PyObject *obj, void *ptr, size_t sz, swig_type_info *ty, int flags) {\n swig_type_info *tc;\n const char *c = 0;\n\n#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)\n c = PySwigPacked_UnpackData(obj, ptr, sz);\n#else\n if ((!obj) || (!PyString_Check(obj))) goto type_error;\n c = PyString_AS_STRING(obj);\n /* Pointer values must start with leading underscore */\n c = SWIG_UnpackDataName(c, ptr, sz, ty->name);\n#endif\n if (!c) goto type_error;\n if (ty) {\n tc = SWIG_TypeCheck(c,ty);\n if (!tc) goto type_error;\n }\n return 0;\n\ntype_error:\n PyErr_Clear();\n if (flags & SWIG_POINTER_EXCEPTION) {\n if (ty) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n } else {\n SWIG_Python_TypeError(\"C/C++ packed data\", obj);\n }\n }\n return -1;\n} \n\n/* Create a new array object */\nSWIGRUNTIME PyObject *\nSWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {\n PyObject *robj = 0;\n if (!ptr) {\n Py_INCREF(Py_None);\n return Py_None;\n }\n#ifdef SWIG_COBJECT_TYPES\n robj = PySwigObject_FromVoidPtrAndDesc((void *) ptr, (char *)type->name);\n#else\n {\n char result[SWIG_BUFFER_SIZE];\n robj = 
SWIG_PackVoidPtr(result, ptr, type->name, sizeof(result)) ?\n PyString_FromString(result) : 0;\n }\n#endif\n if (!robj || (robj == Py_None)) return robj;\n if (type->clientdata) {\n PyObject *inst;\n PyObject *args = Py_BuildValue((char*)\"(O)\", robj);\n Py_DECREF(robj);\n inst = PyObject_CallObject((PyObject *) type->clientdata, args);\n Py_DECREF(args);\n if (inst) {\n if (own) {\n PyObject_SetAttrString(inst,(char*)\"thisown\",Py_True);\n }\n robj = inst;\n }\n }\n return robj;\n}\n\nSWIGRUNTIME PyObject *\nSWIG_Python_NewPackedObj(void *ptr, size_t sz, swig_type_info *type) {\n PyObject *robj = 0;\n if (!ptr) {\n Py_INCREF(Py_None);\n return Py_None;\n }\n#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)\n robj = PySwigPacked_FromDataAndDesc((void *) ptr, sz, (char *)type->name);\n#else\n {\n char result[SWIG_BUFFER_SIZE];\n robj = SWIG_PackDataName(result, ptr, sz, type->name, sizeof(result)) ?\n PyString_FromString(result) : 0;\n }\n#endif\n return robj;\n}\n\n/* -----------------------------------------------------------------------------*\n * Get type list \n * -----------------------------------------------------------------------------*/\n\n#ifdef SWIG_LINK_RUNTIME\nvoid *SWIG_ReturnGlobalTypeList(void *);\n#endif\n\nSWIGRUNTIME swig_type_info **\nSWIG_Python_GetTypeListHandle() {\n static void *type_pointer = (void *)0;\n /* first check if module already created */\n if (!type_pointer) {\n#ifdef SWIG_LINK_RUNTIME\n type_pointer = SWIG_ReturnGlobalTypeList((void *)0);\n#else\n type_pointer = PyCObject_Import((char*)\"swig_runtime_data\" SWIG_RUNTIME_VERSION,\n\t\t\t\t (char*)\"type_pointer\" SWIG_TYPE_TABLE_NAME);\n if (PyErr_Occurred()) {\n PyErr_Clear();\n type_pointer = (void *)0;\n }\n }\n#endif\n return (swig_type_info **) type_pointer;\n}\n\n/*\n Search for a swig_type_info structure\n */\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_Python_GetTypeList() {\n swig_type_info **tlh = SWIG_Python_GetTypeListHandle();\n return tlh ? 
*tlh : (swig_type_info*)0;\n}\n\n#define SWIG_Runtime_GetTypeList SWIG_Python_GetTypeList \n\n#ifdef __cplusplus\n}\n#endif\n\n/* -----------------------------------------------------------------------------*\n Standard SWIG API for use inside user code.\n \n You need to include in your code as follow:\n\n#include // or using your favorite language \n#include \n#include // or using your favorite language \n#include \n \n * -----------------------------------------------------------------------------*/\n\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_Runtime_TypeQuery(const char *name) {\n swig_type_info *tl = SWIG_Runtime_GetTypeList();\n return SWIG_TypeQueryTL(tl, name);\n}\n\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_Runtime_TypeRegister(swig_type_info *ti) {\n swig_type_info *tl = SWIG_Runtime_GetTypeList();\n return SWIG_TypeRegisterTL(&tl, ti);\n}\n\nSWIGRUNTIMEINLINE void\nSWIG_Runtime_TypeClientData(swig_type_info *ti, void *clientdata) {\n swig_type_info *tl = SWIG_Runtime_GetTypeList();\n SWIG_TypeClientDataTL(tl, ti, clientdata);\n}\n\nSWIGRUNTIMEINLINE void\nSWIG_Runtime_PropagateClientData(swig_type_info *type) {\n swig_type_info *tl = SWIG_Runtime_GetTypeList();\n SWIG_PropagateClientDataTL(tl, type);\n}\n\n#define SWIG_GetTypeList() SWIG_Runtime_GetTypeList()\n#define SWIG_TypeQuery(name) SWIG_Runtime_TypeQuery(name)\n#define SWIG_TypeRegister(ti) SWIG_Runtime_TypeRegister(ti)\n#define SWIG_TypeClientData(ti, cd) SWIG_Runtime_TypeClientData(ti, cd)\n#define SWIG_PropagateClientData(ti) SWIG_Runtime_PropagateClientData(ti)\n\n\"\"\"\n\n######################################################################\n# This is for SWIG-1.3.x where x >= 25.\n# SWIG_RUNTIME_VERSION == \"2\"\n\n# All this does is to include the contents of the file generated by\n# this command:\n# swig -python -external-runtime\nswigptr2_code_v2 = \"\"\"\n/* ----------------------------------------------------------------------------\n * This file was automatically generated by SWIG 
(http://www.swig.org).\n * Version 1.3.25\n * \n * This file is not intended to be easily readable and contains a number of \n * coding conventions designed to improve portability and efficiency. Do not make\n * changes to this file unless you know what you are doing--modify the SWIG \n * interface file instead. \n * ----------------------------------------------------------------------------- */\n\n/***********************************************************************\n *\n * This section contains generic SWIG labels for method/variable\n * declarations/attributes, and other compiler dependent labels.\n *\n ************************************************************************/\n\n/* \n SWIGTEMPLATEDISAMBIGUATOR is needed when wrapping template calls\n (cwrap.c:Swig_cfunction_call/Swig_cmethod_call), as in\n\n result = nspace::template function(arg1);\n result = arg1->template method(arg2);\n\n SWIGTEMPLATEDISAMBIGUATOR is compiler dependent (common.swg),\n - SUN Studio requires 'template', \n - gcc-3.4 forbids the use of 'template'.\n - gcc-3.2.3 produces internal errors if you use 'template'\n*/\n#ifndef SWIGTEMPLATEDISAMBIGUATOR\n# if defined(__SUNPRO_CC) \n# define SWIGTEMPLATEDISAMBIGUATOR template\n# else\n# define SWIGTEMPLATEDISAMBIGUATOR \n# endif\n#endif\n\n/* inline attribute */\n#ifndef SWIGINLINE\n# if defined(__cplusplus) || (defined(__GNUC__) && !defined(__STRICT_ANSI__))\n# define SWIGINLINE inline\n# else\n# define SWIGINLINE\n# endif\n#endif\n\n/* attritbute passed for some compilers to avoid 'unused' warnings */\n#ifndef SWIGUNUSED\n# if defined(__GNUC__) || defined(__ICC)\n# define SWIGUNUSED __attribute__ ((unused)) \n# else\n# define SWIGUNUSED \n# endif\n#endif\n\n/* internal SWIG method */\n#ifndef SWIGINTERN\n# define SWIGINTERN static SWIGUNUSED\n#endif\n\n/* internal inline SWIG method */\n#ifndef SWIGINTERNINLINE\n# define SWIGINTERNINLINE SWIGINTERN SWIGINLINE\n#endif\n\n/* how we export a method such that it can go in to a shared 
or dll library */\n#ifndef SWIGEXPORT\n# if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)\n# if defined(_MSC_VER) || defined(__GNUC__)\n# if defined(STATIC_LINKED)\n# define SWIGEXPORT(a) a\n# else\n# define SWIGEXPORT(a) __declspec(dllexport) a\n# endif\n# else\n# if defined(__BORLANDC__)\n# define SWIGEXPORT(a) a _export\n# else\n# define SWIGEXPORT(a) a\n# endif\n# endif\n# else\n# define SWIGEXPORT(a) a\n# endif\n#endif\n\n/***********************************************************************\n * swigrun.swg\n *\n * This file contains generic CAPI SWIG runtime support for pointer\n * type checking.\n *\n ************************************************************************/\n\n/* This should only be incremented when either the layout of swig_type_info changes,\n or for whatever reason, the runtime changes incompatibly */\n#define SWIG_RUNTIME_VERSION \"2\"\n\n/* define SWIG_TYPE_TABLE_NAME as \"SWIG_TYPE_TABLE\" */\n#ifdef SWIG_TYPE_TABLE\n# define SWIG_QUOTE_STRING(x) #x\n# define SWIG_EXPAND_AND_QUOTE_STRING(x) SWIG_QUOTE_STRING(x)\n# define SWIG_TYPE_TABLE_NAME SWIG_EXPAND_AND_QUOTE_STRING(SWIG_TYPE_TABLE)\n#else\n# define SWIG_TYPE_TABLE_NAME\n#endif\n\n/*\n You can use the SWIGRUNTIME and SWIGRUNTIMEINLINE macros for\n creating a static or dynamic library from the swig runtime code.\n In 99.9% of the cases, swig just needs to declare them as 'static'.\n \n But only do this if is strictly necessary, ie, if you have problems\n with your compiler or so.\n*/\n\n#ifndef SWIGRUNTIME\n# define SWIGRUNTIME SWIGINTERN\n#endif\n\n#ifndef SWIGRUNTIMEINLINE\n# define SWIGRUNTIMEINLINE SWIGRUNTIME SWIGINLINE\n#endif\n\n#include \n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\ntypedef void *(*swig_converter_func)(void *);\ntypedef struct swig_type_info *(*swig_dycast_func)(void **);\n\n/* Structure to store inforomation on one type */\ntypedef struct swig_type_info {\n const char *name;\t\t\t/* mangled name of this type */\n const char *str;\t\t\t/* 
human readable name of this type */\n swig_dycast_func dcast;\t\t/* dynamic cast function down a hierarchy */\n struct swig_cast_info *cast;\t\t\t/* linked list of types that can cast into this type */\n void *clientdata;\t\t/* language specific type data */\n} swig_type_info;\n\n/* Structure to store a type and conversion function used for casting */\ntypedef struct swig_cast_info {\n swig_type_info *type;\t\t\t/* pointer to type that is equivalent to this type */\n swig_converter_func converter;\t\t/* function to cast the void pointers */\n struct swig_cast_info *next;\t\t\t/* pointer to next cast in linked list */\n struct swig_cast_info *prev;\t\t\t/* pointer to the previous cast */\n} swig_cast_info;\n\n/* Structure used to store module information\n * Each module generates one structure like this, and the runtime collects\n * all of these structures and stores them in a circularly linked list.*/\ntypedef struct swig_module_info {\n swig_type_info **types;\t\t/* Array of pointers to swig_type_info structures that are in this module */\n size_t size;\t\t /* Number of types in this module */\n struct swig_module_info *next;\t\t/* Pointer to next element in circularly linked list */\n swig_type_info **type_initial;\t/* Array of initially generated type structures */\n swig_cast_info **cast_initial;\t/* Array of initially generated casting structures */\n void *clientdata;\t\t/* Language specific module data */\n} swig_module_info;\n\n\n/* \n Compare two type names skipping the space characters, therefore\n \"char*\" == \"char *\" and \"Class\" == \"Class\", etc.\n\n Return 0 when the two name types are equivalent, as in\n strncmp, but skipping ' '.\n*/\nSWIGRUNTIME int\nSWIG_TypeNameComp(const char *f1, const char *l1,\n\t\t const char *f2, const char *l2) {\n for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {\n while ((*f1 == ' ') && (f1 != l1)) ++f1;\n while ((*f2 == ' ') && (f2 != l2)) ++f2;\n if (*f1 != *f2) return (int)(*f1 - *f2);\n }\n return (l1 - f1) - (l2 - 
f2);\n}\n\n/*\n Check type equivalence in a name list like ||...\n Return 0 if not equal, 1 if equal\n*/\nSWIGRUNTIME int\nSWIG_TypeEquiv(const char *nb, const char *tb) {\n int equiv = 0;\n const char* te = tb + strlen(tb);\n const char* ne = nb;\n while (!equiv && *ne) {\n for (nb = ne; *ne; ++ne) {\n if (*ne == '|') break;\n }\n equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;\n if (*ne) ++ne;\n }\n return equiv;\n}\n\n/*\n Check type equivalence in a name list like ||...\n Return 0 if equal, -1 if nb < tb, 1 if nb > tb\n*/\nSWIGRUNTIME int\nSWIG_TypeCompare(const char *nb, const char *tb) {\n int equiv = 0;\n const char* te = tb + strlen(tb);\n const char* ne = nb;\n while (!equiv && *ne) {\n for (nb = ne; *ne; ++ne) {\n if (*ne == '|') break;\n }\n equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;\n if (*ne) ++ne;\n }\n return equiv;\n}\n\n\n/* think of this as a c++ template<> or a scheme macro */\n#define SWIG_TypeCheck_Template(comparison, ty) \\\n if (ty) { \\\n swig_cast_info *iter = ty->cast; \\\n while (iter) { \\\n if (comparison) { \\\n if (iter == ty->cast) return iter; \\\n /* Move iter to the top of the linked list */ \\\n iter->prev->next = iter->next; \\\n if (iter->next) \\\n iter->next->prev = iter->prev; \\\n iter->next = ty->cast; \\\n iter->prev = 0; \\\n if (ty->cast) ty->cast->prev = iter; \\\n ty->cast = iter; \\\n return iter; \\\n } \\\n iter = iter->next; \\\n } \\\n } \\\n return 0\n\n/*\n Check the typename\n*/\nSWIGRUNTIME swig_cast_info *\nSWIG_TypeCheck(const char *c, swig_type_info *ty) {\n SWIG_TypeCheck_Template(strcmp(iter->type->name, c) == 0, ty);\n}\n\n/* Same as previous function, except strcmp is replaced with a pointer comparison */\nSWIGRUNTIME swig_cast_info *\nSWIG_TypeCheckStruct(swig_type_info *from, swig_type_info *into) {\n SWIG_TypeCheck_Template(iter->type == from, into);\n}\n\n/*\n Cast a pointer up an inheritance hierarchy\n*/\nSWIGRUNTIMEINLINE void *\nSWIG_TypeCast(swig_cast_info *ty, void 
*ptr) {\n return ((!ty) || (!ty->converter)) ? ptr : (*ty->converter)(ptr);\n}\n\n/* \n Dynamic pointer casting. Down an inheritance hierarchy\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeDynamicCast(swig_type_info *ty, void **ptr) {\n swig_type_info *lastty = ty;\n if (!ty || !ty->dcast) return ty;\n while (ty && (ty->dcast)) {\n ty = (*ty->dcast)(ptr);\n if (ty) lastty = ty;\n }\n return lastty;\n}\n\n/*\n Return the name associated with this type\n*/\nSWIGRUNTIMEINLINE const char *\nSWIG_TypeName(const swig_type_info *ty) {\n return ty->name;\n}\n\n/*\n Return the pretty name associated with this type,\n that is an unmangled type name in a form presentable to the user.\n*/\nSWIGRUNTIME const char *\nSWIG_TypePrettyName(const swig_type_info *type) {\n /* The \"str\" field contains the equivalent pretty names of the\n type, separated by vertical-bar characters. We choose\n to print the last name, as it is often (?) the most\n specific. */\n if (type->str != NULL) {\n const char *last_name = type->str;\n const char *s;\n for (s = type->str; *s; s++)\n if (*s == '|') last_name = s+1;\n return last_name;\n }\n else\n return type->name;\n}\n\n/* \n Set the clientdata field for a type\n*/\nSWIGRUNTIME void\nSWIG_TypeClientData(swig_type_info *ti, void *clientdata) {\n if (!ti->clientdata) {\n swig_cast_info *cast = ti->cast;\n /* if (ti->clientdata == clientdata) return; */\n ti->clientdata = clientdata;\n \n while (cast) {\n if (!cast->converter)\n\tSWIG_TypeClientData(cast->type, clientdata);\n cast = cast->next;\n }\n }\n}\n\n/*\n Search for a swig_type_info structure only by mangled name\n Search is a O(log #types)\n \n We start searching at module start, and finish searching when start == end. 
\n Note: if start == end at the beginning of the function, we go all the way around\n the circular list.\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_MangledTypeQueryModule(swig_module_info *start, \n swig_module_info *end, \n\t\t const char *name) {\n swig_module_info *iter = start;\n do {\n if (iter->size) {\n register size_t l = 0;\n register size_t r = iter->size - 1;\n do {\n\t/* since l+r >= 0, we can (>> 1) instead (/ 2) */\n\tregister size_t i = (l + r) >> 1; \n\tconst char *iname = iter->types[i]->name;\n\tif (iname) {\n\t register int compare = strcmp(name, iname);\n\t if (compare == 0) {\t \n\t return iter->types[i];\n\t } else if (compare < 0) {\n\t if (i) {\n\t r = i - 1;\n\t } else {\n\t break;\n\t }\n\t } else if (compare > 0) {\n\t l = i + 1;\n\t }\n\t} else {\n\t break; /* should never happen */\n\t}\n } while (l <= r);\n }\n iter = iter->next;\n } while (iter != end);\n return 0;\n}\n\n/*\n Search for a swig_type_info structure for either a mangled name or a human readable name.\n It first searches the mangled names of the types, which is a O(log #types)\n If a type is not found it then searches the human readable names, which is O(#types).\n \n We start searching at module start, and finish searching when start == end. 
\n Note: if start == end at the beginning of the function, we go all the way around\n the circular list.\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeQueryModule(swig_module_info *start, \n swig_module_info *end, \n\t\t const char *name) {\n /* STEP 1: Search the name field using binary search */\n swig_type_info *ret = SWIG_MangledTypeQueryModule(start, end, name);\n if (ret) {\n return ret;\n } else {\n /* STEP 2: If the type hasn't been found, do a complete search\n of the str field (the human readable name) */\n swig_module_info *iter = start;\n do {\n register size_t i = 0;\n for (; i < iter->size; ++i) {\n\tif (iter->types[i]->str && (SWIG_TypeEquiv(iter->types[i]->str, name)))\n\t return iter->types[i];\n }\n iter = iter->next;\n } while (iter != end);\n }\n \n /* neither found a match */\n return 0;\n}\n\n\n/* \n Pack binary data into a string\n*/\nSWIGRUNTIME char *\nSWIG_PackData(char *c, void *ptr, size_t sz) {\n static const char hex[17] = \"0123456789abcdef\";\n register const unsigned char *u = (unsigned char *) ptr;\n register const unsigned char *eu = u + sz;\n for (; u != eu; ++u) {\n register unsigned char uu = *u;\n *(c++) = hex[(uu & 0xf0) >> 4];\n *(c++) = hex[uu & 0xf];\n }\n return c;\n}\n\n/* \n Unpack binary data from a string\n*/\nSWIGRUNTIME const char *\nSWIG_UnpackData(const char *c, void *ptr, size_t sz) {\n register unsigned char *u = (unsigned char *) ptr;\n register const unsigned char *eu = u + sz;\n for (; u != eu; ++u) {\n register char d = *(c++);\n register unsigned char uu = 0;\n if ((d >= '0') && (d <= '9'))\n uu = ((d - '0') << 4);\n else if ((d >= 'a') && (d <= 'f'))\n uu = ((d - ('a'-10)) << 4);\n else \n return (char *) 0;\n d = *(c++);\n if ((d >= '0') && (d <= '9'))\n uu |= (d - '0');\n else if ((d >= 'a') && (d <= 'f'))\n uu |= (d - ('a'-10));\n else \n return (char *) 0;\n *u = uu;\n }\n return c;\n}\n\n/* \n Pack 'void *' into a string buffer.\n*/\nSWIGRUNTIME char *\nSWIG_PackVoidPtr(char *buff, void *ptr, const char 
*name, size_t bsz) {\n char *r = buff;\n if ((2*sizeof(void *) + 2) > bsz) return 0;\n *(r++) = '_';\n r = SWIG_PackData(r,&ptr,sizeof(void *));\n if (strlen(name) + 1 > (bsz - (r - buff))) return 0;\n strcpy(r,name);\n return buff;\n}\n\nSWIGRUNTIME const char *\nSWIG_UnpackVoidPtr(const char *c, void **ptr, const char *name) {\n if (*c != '_') {\n if (strcmp(c,\"NULL\") == 0) {\n *ptr = (void *) 0;\n return name;\n } else {\n return 0;\n }\n }\n return SWIG_UnpackData(++c,ptr,sizeof(void *));\n}\n\nSWIGRUNTIME char *\nSWIG_PackDataName(char *buff, void *ptr, size_t sz, const char *name, size_t bsz) {\n char *r = buff;\n size_t lname = (name ? strlen(name) : 0);\n if ((2*sz + 2 + lname) > bsz) return 0;\n *(r++) = '_';\n r = SWIG_PackData(r,ptr,sz);\n if (lname) {\n strncpy(r,name,lname+1);\n } else {\n *r = 0;\n }\n return buff;\n}\n\nSWIGRUNTIME const char *\nSWIG_UnpackDataName(const char *c, void *ptr, size_t sz, const char *name) {\n if (*c != '_') {\n if (strcmp(c,\"NULL\") == 0) {\n memset(ptr,0,sz);\n return name;\n } else {\n return 0;\n }\n }\n return SWIG_UnpackData(++c,ptr,sz);\n}\n\n#ifdef __cplusplus\n}\n#endif\n\n/***********************************************************************\n * pyrun.swg\n *\n * This file contains the runtime support for Python modules\n * and includes code for managing global variables and pointer\n * type checking.\n *\n * Author : David Beazley (beazley@cs.uchicago.edu)\n ************************************************************************/\n\n/* Common SWIG API */\n#define SWIG_ConvertPtr(obj, pp, type, flags) SWIG_Python_ConvertPtr(obj, pp, type, flags)\n#define SWIG_NewPointerObj(p, type, flags) SWIG_Python_NewPointerObj(p, type, flags)\n#define SWIG_MustGetPtr(p, type, argnum, flags) SWIG_Python_MustGetPtr(p, type, argnum, flags)\n \n\n/* Python-specific SWIG API */\n#define SWIG_ConvertPacked(obj, ptr, sz, ty, flags) SWIG_Python_ConvertPacked(obj, ptr, sz, ty, flags)\n#define SWIG_NewPackedObj(ptr, sz, type) 
SWIG_Python_NewPackedObj(ptr, sz, type)\n\n/* Runtime API */\n#define SWIG_GetModule(clientdata) SWIG_Python_GetModule()\n#define SWIG_SetModule(clientdata, pointer) SWIG_Python_SetModule(pointer)\n\n/* -----------------------------------------------------------------------------\n * Pointer declarations\n * ----------------------------------------------------------------------------- */\n/*\n Use SWIG_NO_COBJECT_TYPES to force the use of strings to represent\n C/C++ pointers in the python side. Very useful for debugging, but\n not always safe.\n*/\n#if !defined(SWIG_NO_COBJECT_TYPES) && !defined(SWIG_COBJECT_TYPES)\n# define SWIG_COBJECT_TYPES\n#endif\n\n/* Flags for pointer conversion */\n#define SWIG_POINTER_EXCEPTION 0x1\n#define SWIG_POINTER_DISOWN 0x2\n\n\n/* Add PyOS_snprintf for old Pythons */\n#if PY_VERSION_HEX < 0x02020000\n#define PyOS_snprintf snprintf\n#endif\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n/* -----------------------------------------------------------------------------\n * Create a new pointer string \n * ----------------------------------------------------------------------------- */\n#ifndef SWIG_BUFFER_SIZE\n#define SWIG_BUFFER_SIZE 1024\n#endif\n\n#if defined(SWIG_COBJECT_TYPES)\n#if !defined(SWIG_COBJECT_PYTHON)\n/* -----------------------------------------------------------------------------\n * Implements a simple Swig Object type, and use it instead of PyCObject\n * ----------------------------------------------------------------------------- */\n\ntypedef struct {\n PyObject_HEAD\n void *ptr;\n const char *desc;\n} PySwigObject;\n\n/* Declarations for objects of type PySwigObject */\n\nSWIGRUNTIME int\nPySwigObject_print(PySwigObject *v, FILE *fp, int flags)\n{\n char result[SWIG_BUFFER_SIZE];\n flags = flags;\n if (SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result))) {\n fputs(\"\", fp);\n return 0; \n } else {\n return 1; \n }\n}\n \nSWIGRUNTIME PyObject *\nPySwigObject_repr(PySwigObject *v)\n{\n char 
result[SWIG_BUFFER_SIZE];\n return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?\n PyString_FromFormat(\"\", result) : 0;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_str(PySwigObject *v)\n{\n char result[SWIG_BUFFER_SIZE];\n return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?\n PyString_FromString(result) : 0;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_long(PySwigObject *v)\n{\n return PyLong_FromVoidPtr(v->ptr);\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_format(const char* fmt, PySwigObject *v)\n{\n PyObject *res = NULL;\n PyObject *args = PyTuple_New(1);\n if (args && (PyTuple_SetItem(args, 0, PySwigObject_long(v)) == 0)) {\n PyObject *ofmt = PyString_FromString(fmt);\n if (ofmt) {\n res = PyString_Format(ofmt,args);\n Py_DECREF(ofmt);\n }\n Py_DECREF(args);\n } \n return res;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_oct(PySwigObject *v)\n{\n return PySwigObject_format(\"%o\",v);\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_hex(PySwigObject *v)\n{\n return PySwigObject_format(\"%x\",v);\n}\n\nSWIGRUNTIME int\nPySwigObject_compare(PySwigObject *v, PySwigObject *w)\n{\n int c = strcmp(v->desc, w->desc);\n if (c) {\n return (c > 0) ? 1 : -1;\n } else {\n void *i = v->ptr;\n void *j = w->ptr;\n return (i < j) ? -1 : ((i > j) ? 
1 : 0);\n }\n}\n\nSWIGRUNTIME void\nPySwigObject_dealloc(PySwigObject *self)\n{\n PyObject_DEL(self);\n}\n\nSWIGRUNTIME PyTypeObject*\nPySwigObject_type(void) {\n static char pyswigobject_type__doc__[] = \n \"Swig object carries a C/C++ instance pointer\";\n \n static PyNumberMethods PySwigObject_as_number = {\n (binaryfunc)0, /*nb_add*/\n (binaryfunc)0, /*nb_subtract*/\n (binaryfunc)0, /*nb_multiply*/\n (binaryfunc)0, /*nb_divide*/\n (binaryfunc)0, /*nb_remainder*/\n (binaryfunc)0, /*nb_divmod*/\n (ternaryfunc)0,/*nb_power*/\n (unaryfunc)0, /*nb_negative*/\n (unaryfunc)0, /*nb_positive*/\n (unaryfunc)0, /*nb_absolute*/\n (inquiry)0, /*nb_nonzero*/\n 0,\t\t /*nb_invert*/\n 0,\t\t /*nb_lshift*/\n 0,\t\t /*nb_rshift*/\n 0,\t\t /*nb_and*/\n 0,\t\t /*nb_xor*/\n 0,\t\t /*nb_or*/\n (coercion)0, /*nb_coerce*/\n (unaryfunc)PySwigObject_long, /*nb_int*/\n (unaryfunc)PySwigObject_long, /*nb_long*/\n (unaryfunc)0, /*nb_float*/\n (unaryfunc)PySwigObject_oct, /*nb_oct*/\n (unaryfunc)PySwigObject_hex, /*nb_hex*/\n#if PY_VERSION_HEX >= 0x02000000\n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_true_divide */ \n#endif\n };\n\n static PyTypeObject pyswigobject_type\n#if !defined(__cplusplus)\n ; \n static int type_init = 0;\n if (!type_init) {\n PyTypeObject tmp\n#endif\n = {\n PyObject_HEAD_INIT(&PyType_Type)\n 0,\t\t\t\t\t/*ob_size*/\n \"PySwigObject\",\t\t\t/*tp_name*/\n sizeof(PySwigObject),\t\t/*tp_basicsize*/\n 0,\t\t\t\t\t/*tp_itemsize*/\n /* methods */\n (destructor)PySwigObject_dealloc,\t/*tp_dealloc*/\n (printfunc)PySwigObject_print,\t/*tp_print*/\n (getattrfunc)0,\t\t\t/*tp_getattr*/\n (setattrfunc)0,\t\t\t/*tp_setattr*/\n (cmpfunc)PySwigObject_compare,\t/*tp_compare*/\n (reprfunc)PySwigObject_repr,\t/*tp_repr*/\n &PySwigObject_as_number,\t /*tp_as_number*/\n 0,\t\t\t\t\t/*tp_as_sequence*/\n 0,\t\t\t\t\t/*tp_as_mapping*/\n (hashfunc)0,\t\t\t/*tp_hash*/\n (ternaryfunc)0,\t\t\t/*tp_call*/\n (reprfunc)PySwigObject_str,\t\t/*tp_str*/\n /* Space for future 
expansion */\n 0,0,0,0,\n pyswigobject_type__doc__, \t /* Documentation string */\n#if PY_VERSION_HEX >= 0x02000000\n 0, /* tp_traverse */\n 0, /* tp_clear */\n#endif\n#if PY_VERSION_HEX >= 0x02010000\n 0, /* tp_richcompare */\n 0, /* tp_weaklistoffset */\n#endif\n#if PY_VERSION_HEX >= 0x02020000\n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */\n#endif\n#if PY_VERSION_HEX >= 0x02030000\n 0, /* tp_del */\n#endif\n#ifdef COUNT_ALLOCS\n 0,0,0,0 /* tp_alloc -> tp_next */\n#endif\n };\n#if !defined(__cplusplus)\n pyswigobject_type = tmp;\n type_init = 1;\n }\n#endif\n return &pyswigobject_type;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_FromVoidPtrAndDesc(void *ptr, const char *desc)\n{\n PySwigObject *self = PyObject_NEW(PySwigObject, PySwigObject_type());\n if (self) {\n self->ptr = ptr;\n self->desc = desc;\n }\n return (PyObject *)self;\n}\n\nSWIGRUNTIMEINLINE void *\nPySwigObject_AsVoidPtr(PyObject *self)\n{\n return ((PySwigObject *)self)->ptr;\n}\n\nSWIGRUNTIMEINLINE const char *\nPySwigObject_GetDesc(PyObject *self)\n{\n return ((PySwigObject *)self)->desc;\n}\n\nSWIGRUNTIMEINLINE int\nPySwigObject_Check(PyObject *op) {\n return ((op)->ob_type == PySwigObject_type()) \n || (strcmp((op)->ob_type->tp_name,\"PySwigObject\") == 0);\n}\n\n/* -----------------------------------------------------------------------------\n * Implements a simple Swig Packed type, and use it instead of string\n * ----------------------------------------------------------------------------- */\n\ntypedef struct {\n PyObject_HEAD\n void *pack;\n const char *desc;\n size_t size;\n} PySwigPacked;\n\nSWIGRUNTIME int\nPySwigPacked_print(PySwigPacked *v, FILE *fp, int flags)\n{\n char result[SWIG_BUFFER_SIZE];\n flags = flags;\n fputs(\"pack, v->size, 0, sizeof(result))) {\n fputs(\"at \", fp); \n fputs(result, fp); \n }\n fputs(v->desc,fp); \n fputs(\">\", fp);\n return 0; \n}\n \nSWIGRUNTIME PyObject *\nPySwigPacked_repr(PySwigPacked *v)\n{\n char 
result[SWIG_BUFFER_SIZE];\n if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {\n return PyString_FromFormat(\"\", result, v->desc);\n } else {\n return PyString_FromFormat(\"\", v->desc);\n } \n}\n\nSWIGRUNTIME PyObject *\nPySwigPacked_str(PySwigPacked *v)\n{\n char result[SWIG_BUFFER_SIZE];\n if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))){\n return PyString_FromFormat(\"%s%s\", result, v->desc);\n } else {\n return PyString_FromFormat(\"%s\", v->desc);\n } \n}\n\nSWIGRUNTIME int\nPySwigPacked_compare(PySwigPacked *v, PySwigPacked *w)\n{\n int c = strcmp(v->desc, w->desc);\n if (c) {\n return (c > 0) ? 1 : -1;\n } else {\n size_t i = v->size;\n size_t j = w->size;\n int s = (i < j) ? -1 : ((i > j) ? 1 : 0);\n return s ? s : strncmp((char *)v->pack, (char *)w->pack, 2*v->size);\n }\n}\n\nSWIGRUNTIME void\nPySwigPacked_dealloc(PySwigPacked *self)\n{\n free(self->pack);\n PyObject_DEL(self);\n}\n\nSWIGRUNTIME PyTypeObject*\nPySwigPacked_type(void) {\n static char pyswigpacked_type__doc__[] = \n \"Swig object carries a C/C++ instance pointer\";\n static PyTypeObject pyswigpacked_type\n#if !defined(__cplusplus)\n ;\n static int type_init = 0; \n if (!type_init) {\n PyTypeObject tmp\n#endif\n = {\n PyObject_HEAD_INIT(&PyType_Type)\n 0,\t\t\t\t\t/*ob_size*/\n \"PySwigPacked\",\t\t\t/*tp_name*/\n sizeof(PySwigPacked),\t\t/*tp_basicsize*/\n 0,\t\t\t\t\t/*tp_itemsize*/\n /* methods */\n (destructor)PySwigPacked_dealloc,\t/*tp_dealloc*/\n (printfunc)PySwigPacked_print,\t/*tp_print*/\n (getattrfunc)0,\t\t\t/*tp_getattr*/\n (setattrfunc)0,\t\t\t/*tp_setattr*/\n (cmpfunc)PySwigPacked_compare,\t/*tp_compare*/\n (reprfunc)PySwigPacked_repr,\t/*tp_repr*/\n 0,\t /*tp_as_number*/\n 0,\t\t\t\t\t/*tp_as_sequence*/\n 0,\t\t\t\t\t/*tp_as_mapping*/\n (hashfunc)0,\t\t\t/*tp_hash*/\n (ternaryfunc)0,\t\t\t/*tp_call*/\n (reprfunc)PySwigPacked_str,\t\t/*tp_str*/\n /* Space for future expansion */\n 0,0,0,0,\n pyswigpacked_type__doc__, \t /* Documentation 
string */\n#if PY_VERSION_HEX >= 0x02000000\n 0, /* tp_traverse */\n 0, /* tp_clear */\n#endif\n#if PY_VERSION_HEX >= 0x02010000\n 0, /* tp_richcompare */\n 0, /* tp_weaklistoffset */\n#endif\n#if PY_VERSION_HEX >= 0x02020000 \n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */\n#endif\n#if PY_VERSION_HEX >= 0x02030000\n 0, /* tp_del */\n#endif\n#ifdef COUNT_ALLOCS\n 0,0,0,0 /* tp_alloc -> tp_next */\n#endif\n };\n#if !defined(__cplusplus)\n pyswigpacked_type = tmp;\n type_init = 1;\n }\n#endif\n return &pyswigpacked_type;\n}\n\nSWIGRUNTIME PyObject *\nPySwigPacked_FromDataAndDesc(void *ptr, size_t size, const char *desc)\n{\n PySwigPacked *self = PyObject_NEW(PySwigPacked, PySwigPacked_type());\n if (self == NULL) {\n return NULL;\n } else {\n void *pack = malloc(size);\n if (pack) {\n memcpy(pack, ptr, size);\n self->pack = pack;\n self->desc = desc;\n self->size = size;\n return (PyObject *) self;\n }\n return NULL;\n }\n}\n\nSWIGRUNTIMEINLINE const char *\nPySwigPacked_UnpackData(PyObject *obj, void *ptr, size_t size)\n{\n PySwigPacked *self = (PySwigPacked *)obj;\n if (self->size != size) return 0;\n memcpy(ptr, self->pack, size);\n return self->desc;\n}\n\nSWIGRUNTIMEINLINE const char *\nPySwigPacked_GetDesc(PyObject *self)\n{\n return ((PySwigPacked *)self)->desc;\n}\n\nSWIGRUNTIMEINLINE int\nPySwigPacked_Check(PyObject *op) {\n return ((op)->ob_type == PySwigPacked_type()) \n || (strcmp((op)->ob_type->tp_name,\"PySwigPacked\") == 0);\n}\n\n#else\n/* -----------------------------------------------------------------------------\n * Use the old Python PyCObject instead of PySwigObject\n * ----------------------------------------------------------------------------- */\n\n#define PySwigObject_GetDesc(obj)\t PyCObject_GetDesc(obj)\n#define PySwigObject_Check(obj)\t PyCObject_Check(obj)\n#define PySwigObject_AsVoidPtr(obj)\t PyCObject_AsVoidPtr(obj)\n#define PySwigObject_FromVoidPtrAndDesc(p, d) PyCObject_FromVoidPtrAndDesc(p, d, 
NULL)\n\n#endif\n\n#endif\n\n/* -----------------------------------------------------------------------------\n * errors manipulation\n * ----------------------------------------------------------------------------- */\n\nSWIGRUNTIME void\nSWIG_Python_TypeError(const char *type, PyObject *obj)\n{\n if (type) {\n#if defined(SWIG_COBJECT_TYPES)\n if (obj && PySwigObject_Check(obj)) {\n const char *otype = (const char *) PySwigObject_GetDesc(obj);\n if (otype) {\n\tPyErr_Format(PyExc_TypeError, \"a '%s' is expected, 'PySwigObject(%s)' is received\",\n\t\t type, otype);\n\treturn;\n }\n } else \n#endif \n {\n const char *otype = (obj ? obj->ob_type->tp_name : 0); \n if (otype) {\n\tPyObject *str = PyObject_Str(obj);\n\tconst char *cstr = str ? PyString_AsString(str) : 0;\n\tif (cstr) {\n\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s(%s)' is received\",\n\t\t type, otype, cstr);\n\t} else {\n\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s' is received\",\n\t\t type, otype);\n\t}\n\tPy_XDECREF(str);\n\treturn;\n }\n } \n PyErr_Format(PyExc_TypeError, \"a '%s' is expected\", type);\n } else {\n PyErr_Format(PyExc_TypeError, \"unexpected type is received\");\n }\n}\n\nSWIGRUNTIMEINLINE void\nSWIG_Python_NullRef(const char *type)\n{\n if (type) {\n PyErr_Format(PyExc_TypeError, \"null reference of type '%s' was received\",type);\n } else {\n PyErr_Format(PyExc_TypeError, \"null reference was received\");\n }\n}\n\nSWIGRUNTIME int\nSWIG_Python_AddErrMesg(const char* mesg, int infront)\n{\n if (PyErr_Occurred()) {\n PyObject *type = 0;\n PyObject *value = 0;\n PyObject *traceback = 0;\n PyErr_Fetch(&type, &value, &traceback);\n if (value) {\n PyObject *old_str = PyObject_Str(value);\n Py_XINCREF(type);\n PyErr_Clear();\n if (infront) {\n\tPyErr_Format(type, \"%s %s\", mesg, PyString_AsString(old_str));\n } else {\n\tPyErr_Format(type, \"%s %s\", PyString_AsString(old_str), mesg);\n }\n Py_DECREF(old_str);\n }\n return 1;\n } else {\n return 0;\n 
}\n}\n\nSWIGRUNTIME int\nSWIG_Python_ArgFail(int argnum)\n{\n if (PyErr_Occurred()) {\n /* add information about failing argument */\n char mesg[256];\n PyOS_snprintf(mesg, sizeof(mesg), \"argument number %d:\", argnum);\n return SWIG_Python_AddErrMesg(mesg, 1);\n } else {\n return 0;\n }\n}\n\n\n/* -----------------------------------------------------------------------------\n * pointers/data manipulation\n * ----------------------------------------------------------------------------- */\n\n/* Convert a pointer value */\nSWIGRUNTIME int\nSWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {\n swig_cast_info *tc;\n const char *c = 0;\n static PyObject *SWIG_this = 0;\n int newref = 0;\n PyObject *pyobj = 0;\n void *vptr;\n \n if (!obj) return 0;\n if (obj == Py_None) {\n *ptr = 0;\n return 0;\n }\n\n#ifdef SWIG_COBJECT_TYPES\n if (!(PySwigObject_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PySwigObject_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n vptr = PySwigObject_AsVoidPtr(obj);\n c = (const char *) PySwigObject_GetDesc(obj);\n if (newref) { Py_DECREF(obj); }\n goto type_check;\n#else\n if (!(PyString_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PyString_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n c = PyString_AS_STRING(obj);\n /* Pointer values must start with leading underscore */\n c = SWIG_UnpackVoidPtr(c, &vptr, ty->name);\n if (newref) { Py_DECREF(obj); }\n if (!c) goto type_error;\n#endif\n\ntype_check:\n if (ty) {\n tc = SWIG_TypeCheck(c,ty);\n if (!tc) goto type_error;\n *ptr = SWIG_TypeCast(tc,vptr);\n } else {\n *ptr = vptr;\n }\n if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {\n 
PyObject_SetAttrString(pyobj,(char*)\"thisown\",Py_False);\n }\n return 0;\n\ntype_error:\n PyErr_Clear();\n if (pyobj && !obj) { \n obj = pyobj;\n if (PyCFunction_Check(obj)) {\n /* here we get the method pointer for callbacks */\n char *doc = (((PyCFunctionObject *)obj) -> m_ml -> ml_doc);\n c = doc ? strstr(doc, \"swig_ptr: \") : 0;\n if (c) {\n\tc = ty ? SWIG_UnpackVoidPtr(c + 10, &vptr, ty->name) : 0;\n\tif (!c) goto type_error;\n\tgoto type_check;\n }\n }\n }\n if (flags & SWIG_POINTER_EXCEPTION) {\n if (ty) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n } else {\n SWIG_Python_TypeError(\"C/C++ pointer\", obj);\n }\n }\n return -1;\n}\n\n/* Convert a pointer value, signal an exception on a type mismatch */\nSWIGRUNTIME void *\nSWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {\n void *result;\n if (SWIG_Python_ConvertPtr(obj, &result, ty, flags) == -1) {\n PyErr_Clear();\n if (flags & SWIG_POINTER_EXCEPTION) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n SWIG_Python_ArgFail(argnum);\n }\n }\n return result;\n}\n\n/* Convert a packed value value */\nSWIGRUNTIME int\nSWIG_Python_ConvertPacked(PyObject *obj, void *ptr, size_t sz, swig_type_info *ty, int flags) {\n swig_cast_info *tc;\n const char *c = 0;\n\n#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)\n c = PySwigPacked_UnpackData(obj, ptr, sz);\n#else\n if ((!obj) || (!PyString_Check(obj))) goto type_error;\n c = PyString_AS_STRING(obj);\n /* Pointer values must start with leading underscore */\n c = SWIG_UnpackDataName(c, ptr, sz, ty->name);\n#endif\n if (!c) goto type_error;\n if (ty) {\n tc = SWIG_TypeCheck(c,ty);\n if (!tc) goto type_error;\n }\n return 0;\n\ntype_error:\n PyErr_Clear();\n if (flags & SWIG_POINTER_EXCEPTION) {\n if (ty) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n } else {\n SWIG_Python_TypeError(\"C/C++ packed data\", obj);\n }\n }\n return -1;\n} \n\n/* Create a new array object */\nSWIGRUNTIME 
PyObject *\nSWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {\n PyObject *robj = 0;\n if (!type) {\n if (!PyErr_Occurred()) {\n PyErr_Format(PyExc_TypeError, \"Swig: null type passed to NewPointerObj\");\n }\n return robj;\n }\n if (!ptr) {\n Py_INCREF(Py_None);\n return Py_None;\n }\n#ifdef SWIG_COBJECT_TYPES\n robj = PySwigObject_FromVoidPtrAndDesc((void *) ptr, (char *)type->name);\n#else\n {\n char result[SWIG_BUFFER_SIZE];\n robj = SWIG_PackVoidPtr(result, ptr, type->name, sizeof(result)) ?\n PyString_FromString(result) : 0;\n }\n#endif\n if (!robj || (robj == Py_None)) return robj;\n if (type->clientdata) {\n PyObject *inst;\n PyObject *args = Py_BuildValue((char*)\"(O)\", robj);\n Py_DECREF(robj);\n inst = PyObject_CallObject((PyObject *) type->clientdata, args);\n Py_DECREF(args);\n if (inst) {\n if (own) {\n PyObject_SetAttrString(inst,(char*)\"thisown\",Py_True);\n }\n robj = inst;\n }\n }\n return robj;\n}\n\nSWIGRUNTIME PyObject *\nSWIG_Python_NewPackedObj(void *ptr, size_t sz, swig_type_info *type) {\n PyObject *robj = 0;\n if (!ptr) {\n Py_INCREF(Py_None);\n return Py_None;\n }\n#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)\n robj = PySwigPacked_FromDataAndDesc((void *) ptr, sz, (char *)type->name);\n#else\n {\n char result[SWIG_BUFFER_SIZE];\n robj = SWIG_PackDataName(result, ptr, sz, type->name, sizeof(result)) ?\n PyString_FromString(result) : 0;\n }\n#endif\n return robj;\n}\n\n/* -----------------------------------------------------------------------------*\n * Get type list \n * -----------------------------------------------------------------------------*/\n\n#ifdef SWIG_LINK_RUNTIME\nvoid *SWIG_ReturnGlobalTypeList(void *);\n#endif\n\nSWIGRUNTIME swig_module_info *\nSWIG_Python_GetModule(void) {\n static void *type_pointer = (void *)0;\n /* first check if module already created */\n if (!type_pointer) {\n#ifdef SWIG_LINK_RUNTIME\n type_pointer = SWIG_ReturnGlobalTypeList((void *)0);\n#else\n type_pointer 
= PyCObject_Import((char*)\"swig_runtime_data\" SWIG_RUNTIME_VERSION,\n\t\t\t\t (char*)\"type_pointer\" SWIG_TYPE_TABLE_NAME);\n if (PyErr_Occurred()) {\n PyErr_Clear();\n type_pointer = (void *)0;\n }\n }\n#endif\n return (swig_module_info *) type_pointer;\n}\n\nSWIGRUNTIME void\nSWIG_Python_SetModule(swig_module_info *swig_module) {\n static PyMethodDef swig_empty_runtime_method_table[] = { {NULL, NULL, 0, NULL} };/* Sentinel */\n\n PyObject *module = Py_InitModule((char*)\"swig_runtime_data\" SWIG_RUNTIME_VERSION,\n\t\t\t\t swig_empty_runtime_method_table);\n PyObject *pointer = PyCObject_FromVoidPtr((void *) swig_module, NULL);\n if (pointer && module) {\n PyModule_AddObject(module, (char*)\"type_pointer\" SWIG_TYPE_TABLE_NAME, pointer);\n }\n}\n\n#ifdef __cplusplus\n}\n#endif\n\n/* -----------------------------------------------------------------------------*\n Standard SWIG API for use inside user code.\n \n Don't include this file directly, run the command\n swig -python -external-runtime\n Also, read the Modules chapter of the SWIG Manual.\n \n * -----------------------------------------------------------------------------*/\n\n#ifdef SWIG_MODULE_CLIENTDATA_TYPE\n\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_TypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {\n swig_module_info *module = SWIG_GetModule(clientdata);\n return SWIG_TypeQueryModule(module, module, name);\n}\n\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_MangledTypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {\n swig_module_info *module = SWIG_GetModule(clientdata);\n return SWIG_MangledTypeQueryModule(module, module, name);\n}\n\n#else\n\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_TypeQuery(const char *name) {\n swig_module_info *module = SWIG_GetModule();\n return SWIG_TypeQueryModule(module, module, name);\n}\n\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_MangledTypeQuery(const char *name) {\n swig_module_info *module = SWIG_GetModule();\n return 
SWIG_MangledTypeQueryModule(module, module, name);\n}\n\n#endif\n\n\n\"\"\"\n", "source_code_before": "# This code allows one to use SWIG wrapped objects from weave. This\n# code is specific to SWIG-1.3 and above where things are different.\n# The code is basically all copied out from the SWIG wrapper code but\n# it has been hand edited for brevity.\n#\n# Prabhu Ramachandran \n\n######################################################################\n# This is for SWIG-1.3.x where x < 22.\n# Essentially, SWIG_RUNTIME_VERSION was not yet used.\nswigptr2_code_v0 = \"\"\"\n\n#include \"Python.h\"\n\n/*************************************************************** -*- c -*-\n * python/precommon.swg\n *\n * Rename all exported symbols from common.swg, to avoid symbol\n * clashes if multiple interpreters are included\n *\n ************************************************************************/\n\n#define SWIG_TypeCheck SWIG_Python_TypeCheck\n#define SWIG_TypeCast SWIG_Python_TypeCast\n#define SWIG_TypeName SWIG_Python_TypeName\n#define SWIG_TypeQuery SWIG_Python_TypeQuery\n#define SWIG_PackData SWIG_Python_PackData \n#define SWIG_UnpackData SWIG_Python_UnpackData \n\n\n/***********************************************************************\n * common.swg\n *\n * This file contains generic SWIG runtime support for pointer\n * type checking as well as a few commonly used macros to control\n * external linkage.\n *\n * Author : David Beazley (beazley@cs.uchicago.edu)\n *\n * Copyright (c) 1999-2000, The University of Chicago\n * \n * This file may be freely redistributed without license or fee provided\n * this copyright message remains intact.\n ************************************************************************/\n\n#include \n\n#if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)\n# if defined(_MSC_VER) || defined(__GNUC__)\n# if defined(STATIC_LINKED)\n# define SWIGEXPORT(a) a\n# define SWIGIMPORT(a) extern a\n# else\n# define SWIGEXPORT(a) 
__declspec(dllexport) a\n# define SWIGIMPORT(a) extern a\n# endif\n# else\n# if defined(__BORLANDC__)\n# define SWIGEXPORT(a) a _export\n# define SWIGIMPORT(a) a _export\n# else\n# define SWIGEXPORT(a) a\n# define SWIGIMPORT(a) a\n# endif\n# endif\n#else\n# define SWIGEXPORT(a) a\n# define SWIGIMPORT(a) a\n#endif\n\n#ifdef SWIG_GLOBAL\n# define SWIGRUNTIME(a) SWIGEXPORT(a)\n#else\n# define SWIGRUNTIME(a) static a\n#endif\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\ntypedef void *(*swig_converter_func)(void *);\ntypedef struct swig_type_info *(*swig_dycast_func)(void **);\n\ntypedef struct swig_type_info {\n const char *name;\n swig_converter_func converter;\n const char *str;\n void *clientdata;\n swig_dycast_func dcast;\n struct swig_type_info *next;\n struct swig_type_info *prev;\n} swig_type_info;\n\n#ifdef SWIG_NOINCLUDE\n\nSWIGIMPORT(swig_type_info *) SWIG_TypeCheck(char *c, swig_type_info *);\nSWIGIMPORT(void *) SWIG_TypeCast(swig_type_info *, void *);\nSWIGIMPORT(const char *) SWIG_TypeName(const swig_type_info *);\nSWIGIMPORT(swig_type_info *) SWIG_TypeQuery(const char *);\nSWIGIMPORT(char *) SWIG_PackData(char *, void *, int);\nSWIGIMPORT(char *) SWIG_UnpackData(char *, void *, int);\n\n#else\n\nstatic swig_type_info *swig_type_list = 0;\n\n/* Check the typename */\nSWIGRUNTIME(swig_type_info *) \nSWIG_TypeCheck(char *c, swig_type_info *ty) {\n swig_type_info *s;\n if (!ty) return 0; /* Void pointer */\n s = ty->next; /* First element always just a name */\n do {\n if (strcmp(s->name,c) == 0) {\n if (s == ty->next) return s;\n /* Move s to the top of the linked list */\n s->prev->next = s->next;\n if (s->next) {\n s->next->prev = s->prev;\n }\n /* Insert s as second element in the list */\n s->next = ty->next;\n if (ty->next) ty->next->prev = s;\n ty->next = s;\n s->prev = ty;\n return s;\n }\n s = s->next;\n } while (s && (s != ty->next));\n return 0;\n}\n\n/* Cast a pointer up an inheritance hierarchy */\nSWIGRUNTIME(void *) 
\nSWIG_TypeCast(swig_type_info *ty, void *ptr) {\n if ((!ty) || (!ty->converter)) return ptr;\n return (*ty->converter)(ptr);\n}\n\n/* Return the name associated with this type */\nSWIGRUNTIME(const char *)\nSWIG_TypeName(const swig_type_info *ty) {\n return ty->name;\n}\n\n/* \n Compare two type names skipping the space characters, therefore\n \"char*\" == \"char *\" and \"Class\" == \"Class\", etc.\n\n Return 0 when the two name types are equivalent, as in\n strncmp, but skipping ' '.\n*/\nstatic int\nSWIG_TypeNameComp(const char *f1, const char *l1,\n\t\t const char *f2, const char *l2) {\n for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {\n while ((*f1 == ' ') && (f1 != l1)) ++f1;\n while ((*f2 == ' ') && (f2 != l2)) ++f2;\n if (*f1 != *f2) return *f1 - *f2;\n }\n return (l1 - f1) - (l2 - f2);\n}\n\n/*\n Check type equivalence in a name list like ||...\n*/\nstatic int\nSWIG_TypeEquiv(const char *nb, const char *tb) {\n int equiv = 0;\n const char* te = tb + strlen(tb);\n const char* ne = nb;\n while (!equiv && *ne) {\n for (nb = ne; *ne; ++ne) {\n if (*ne == '|') break;\n }\n equiv = SWIG_TypeNameComp(nb, ne, tb, te) == 0;\n if (*ne) ++ne;\n }\n return equiv;\n}\n \n\n/* Search for a swig_type_info structure */\nSWIGRUNTIME(swig_type_info *)\nSWIG_TypeQuery(const char *name) {\n swig_type_info *ty = swig_type_list;\n while (ty) {\n if (ty->str && (SWIG_TypeEquiv(ty->str,name))) return ty;\n if (ty->name && (strcmp(name,ty->name) == 0)) return ty;\n ty = ty->prev;\n }\n return 0;\n}\n\n/* Pack binary data into a string */\nSWIGRUNTIME(char *)\nSWIG_PackData(char *c, void *ptr, int sz) {\n static char hex[17] = \"0123456789abcdef\";\n int i;\n unsigned char *u = (unsigned char *) ptr;\n register unsigned char uu;\n for (i = 0; i < sz; i++,u++) {\n uu = *u;\n *(c++) = hex[(uu & 0xf0) >> 4];\n *(c++) = hex[uu & 0xf];\n }\n return c;\n}\n\n/* Unpack binary data from a string */\nSWIGRUNTIME(char *)\nSWIG_UnpackData(char *c, void *ptr, int sz) {\n register unsigned char 
uu = 0;\n register int d;\n unsigned char *u = (unsigned char *) ptr;\n int i;\n for (i = 0; i < sz; i++, u++) {\n d = *(c++);\n if ((d >= '0') && (d <= '9'))\n uu = ((d - '0') << 4);\n else if ((d >= 'a') && (d <= 'f'))\n uu = ((d - ('a'-10)) << 4);\n d = *(c++);\n if ((d >= '0') && (d <= '9'))\n uu |= (d - '0');\n else if ((d >= 'a') && (d <= 'f'))\n uu |= (d - ('a'-10));\n *u = uu;\n }\n return c;\n}\n\n#endif\n\n#ifdef __cplusplus\n}\n#endif\n\n/***********************************************************************\n * python.swg\n *\n * This file contains the runtime support for Python modules\n * and includes code for managing global variables and pointer\n * type checking.\n *\n * Author : David Beazley (beazley@cs.uchicago.edu)\n ************************************************************************/\n\n#include \"Python.h\"\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n#define SWIG_PY_INT 1\n#define SWIG_PY_FLOAT 2\n#define SWIG_PY_STRING 3\n#define SWIG_PY_POINTER 4\n#define SWIG_PY_BINARY 5\n\n/* Flags for pointer conversion */\n\n#define SWIG_POINTER_EXCEPTION 0x1\n#define SWIG_POINTER_DISOWN 0x2\n\n/* Exception handling in wrappers */\n#define SWIG_fail goto fail\n\n/* Constant information structure */\ntypedef struct swig_const_info {\n int type;\n char *name;\n long lvalue;\n double dvalue;\n void *pvalue;\n swig_type_info **ptype;\n} swig_const_info;\n\n/* Common SWIG API */\n#define SWIG_ConvertPtr(obj, pp, type, flags) \\\n SWIG_Python_ConvertPtr(obj, pp, type, flags)\n#define SWIG_NewPointerObj(p, type, flags) \\\n SWIG_Python_NewPointerObj(p, type, flags)\n#define SWIG_MustGetPtr(p, type, argnum, flags) \\\n SWIG_Python_MustGetPtr(p, type, argnum, flags)\n \n\ntypedef double (*py_objasdbl_conv)(PyObject *obj);\n\n#ifdef SWIG_NOINCLUDE\n\nSWIGIMPORT(int) SWIG_Python_ConvertPtr(PyObject *, void **, swig_type_info *, int);\nSWIGIMPORT(PyObject *) SWIG_Python_NewPointerObj(void *, swig_type_info *,int own);\nSWIGIMPORT(void *) 
SWIG_Python_MustGetPtr(PyObject *, swig_type_info *, int, int);\n\n#else\n\n\n/* Convert a pointer value */\nSWIGRUNTIME(int)\nSWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {\n swig_type_info *tc;\n char *c = 0;\n static PyObject *SWIG_this = 0;\n int newref = 0;\n PyObject *pyobj = 0;\n\n if (!obj) return 0;\n if (obj == Py_None) {\n *ptr = 0;\n return 0;\n }\n#ifdef SWIG_COBJECT_TYPES\n if (!(PyCObject_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PyCObject_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n *ptr = PyCObject_AsVoidPtr(obj);\n c = (char *) PyCObject_GetDesc(obj);\n if (newref) Py_DECREF(obj);\n goto cobject;\n#else\n if (!(PyString_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PyString_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n c = PyString_AsString(obj);\n /* Pointer values must start with leading underscore */\n if (*c != '_') {\n *ptr = (void *) 0;\n if (strcmp(c,\"NULL\") == 0) {\n if (newref) { Py_DECREF(obj); }\n return 0;\n } else {\n if (newref) { Py_DECREF(obj); }\n goto type_error;\n }\n }\n c++;\n c = SWIG_UnpackData(c,ptr,sizeof(void *));\n if (newref) { Py_DECREF(obj); }\n#endif\n\n#ifdef SWIG_COBJECT_TYPES\ncobject:\n#endif\n\n if (ty) {\n tc = SWIG_TypeCheck(c,ty);\n if (!tc) goto type_error;\n *ptr = SWIG_TypeCast(tc,(void*) *ptr);\n }\n\n if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {\n PyObject *zero = PyInt_FromLong(0);\n PyObject_SetAttrString(pyobj,(char*)\"thisown\",zero);\n Py_DECREF(zero);\n }\n return 0;\n\ntype_error:\n PyErr_Clear();\n if (flags & SWIG_POINTER_EXCEPTION) {\n if (ty && c) {\n PyErr_Format(PyExc_TypeError, \n\t\t \"Type error. 
Got %s, expected %s\",\n\t\t c, ty->name);\n } else {\n PyErr_SetString(PyExc_TypeError,\"Expected a pointer\");\n }\n }\n return -1;\n}\n\n/* Convert a pointer value, signal an exception on a type mismatch */\nSWIGRUNTIME(void *)\nSWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {\n void *result;\n SWIG_Python_ConvertPtr(obj, &result, ty, flags | SWIG_POINTER_EXCEPTION);\n return result;\n}\n\n/* Create a new pointer object */\nSWIGRUNTIME(PyObject *)\nSWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {\n PyObject *robj;\n if (!ptr) {\n Py_INCREF(Py_None);\n return Py_None;\n }\n#ifdef SWIG_COBJECT_TYPES\n robj = PyCObject_FromVoidPtrAndDesc((void *) ptr, (char *) type->name, NULL);\n#else\n {\n char result[1024];\n char *r = result;\n *(r++) = '_';\n r = SWIG_PackData(r,&ptr,sizeof(void *));\n strcpy(r,type->name);\n robj = PyString_FromString(result);\n }\n#endif\n if (!robj || (robj == Py_None)) return robj;\n if (type->clientdata) {\n PyObject *inst;\n PyObject *args = Py_BuildValue((char*)\"(O)\", robj);\n Py_DECREF(robj);\n inst = PyObject_CallObject((PyObject *) type->clientdata, args);\n Py_DECREF(args);\n if (inst) {\n if (own) {\n PyObject *n = PyInt_FromLong(1);\n PyObject_SetAttrString(inst,(char*)\"thisown\",n);\n Py_DECREF(n);\n }\n robj = inst;\n }\n }\n return robj;\n}\n\n#endif\n\n#ifdef __cplusplus\n}\n#endif\n\n\"\"\"\n\n\n######################################################################\n# This is for SWIG-1.3.x where x >= 23.\n# SWIG_RUNTIME_VERSION == \"1\"\n\n# All this does is to include (cut/paste): \n# and \nswigptr2_code_v1 = \"\"\"\n/***********************************************************************\n * swigrun.swg\n *\n * This file contains generic CAPI SWIG runtime support for pointer\n * type checking.\n *\n ************************************************************************/\n\n/* This should only be incremented when either the layout of swig_type_info changes,\n or 
for whatever reason, the runtime changes incompatibly */\n#define SWIG_RUNTIME_VERSION \"1\"\n\n/* define SWIG_TYPE_TABLE_NAME as \"SWIG_TYPE_TABLE\" */\n#ifdef SWIG_TYPE_TABLE\n#define SWIG_QUOTE_STRING(x) #x\n#define SWIG_EXPAND_AND_QUOTE_STRING(x) SWIG_QUOTE_STRING(x)\n#define SWIG_TYPE_TABLE_NAME SWIG_EXPAND_AND_QUOTE_STRING(SWIG_TYPE_TABLE)\n#else\n#define SWIG_TYPE_TABLE_NAME\n#endif\n\n#include \n\n#ifndef SWIGINLINE\n#if defined(__cplusplus) || (defined(__GNUC__) && !defined(__STRICT_ANSI__))\n# define SWIGINLINE inline\n#else\n# define SWIGINLINE\n#endif\n#endif\n\n/*\n You can use the SWIGRUNTIME and SWIGRUNTIMEINLINE macros for\n creating a static or dynamic library from the swig runtime code.\n In 99.9% of the cases, swig just needs to declare them as 'static'.\n \n But only do this if is strictly necessary, ie, if you have problems\n with your compiler or so.\n*/\n#ifndef SWIGRUNTIME\n#define SWIGRUNTIME static\n#endif\n#ifndef SWIGRUNTIMEINLINE\n#define SWIGRUNTIMEINLINE SWIGRUNTIME SWIGINLINE\n#endif\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\ntypedef void *(*swig_converter_func)(void *);\ntypedef struct swig_type_info *(*swig_dycast_func)(void **);\n\ntypedef struct swig_type_info {\n const char *name;\n swig_converter_func converter;\n const char *str;\n void *clientdata;\n swig_dycast_func dcast;\n struct swig_type_info *next;\n struct swig_type_info *prev;\n} swig_type_info;\n\n/* \n Compare two type names skipping the space characters, therefore\n \"char*\" == \"char *\" and \"Class\" == \"Class\", etc.\n\n Return 0 when the two name types are equivalent, as in\n strncmp, but skipping ' '.\n*/\nSWIGRUNTIME int\nSWIG_TypeNameComp(const char *f1, const char *l1,\n\t\t const char *f2, const char *l2) {\n for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {\n while ((*f1 == ' ') && (f1 != l1)) ++f1;\n while ((*f2 == ' ') && (f2 != l2)) ++f2;\n if (*f1 != *f2) return *f1 - *f2;\n }\n return (l1 - f1) - (l2 - f2);\n}\n\n/*\n Check type equivalence in a 
name list like ||...\n*/\nSWIGRUNTIME int\nSWIG_TypeEquiv(const char *nb, const char *tb) {\n int equiv = 0;\n const char* te = tb + strlen(tb);\n const char* ne = nb;\n while (!equiv && *ne) {\n for (nb = ne; *ne; ++ne) {\n if (*ne == '|') break;\n }\n equiv = SWIG_TypeNameComp(nb, ne, tb, te) == 0;\n if (*ne) ++ne;\n }\n return equiv;\n}\n\n/*\n Register a type mapping with the type-checking\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeRegisterTL(swig_type_info **tl, swig_type_info *ti) {\n swig_type_info *tc, *head, *ret, *next;\n /* Check to see if this type has already been registered */\n tc = *tl;\n while (tc) {\n /* check simple type equivalence */\n int typeequiv = (strcmp(tc->name, ti->name) == 0); \n /* check full type equivalence, resolving typedefs */\n if (!typeequiv) {\n /* only if tc is not a typedef (no '|' on it) */\n if (tc->str && ti->str && !strstr(tc->str,\"|\")) {\n\ttypeequiv = SWIG_TypeEquiv(ti->str,tc->str);\n }\n }\n if (typeequiv) {\n /* Already exists in the table. 
Just add additional types to the list */\n if (ti->clientdata) tc->clientdata = ti->clientdata;\n head = tc;\n next = tc->next;\n goto l1;\n }\n tc = tc->prev;\n }\n head = ti;\n next = 0;\n\n /* Place in list */\n ti->prev = *tl;\n *tl = ti;\n\n /* Build linked lists */\n l1:\n ret = head;\n tc = ti + 1;\n /* Patch up the rest of the links */\n while (tc->name) {\n head->next = tc;\n tc->prev = head;\n head = tc;\n tc++;\n }\n if (next) next->prev = head;\n head->next = next;\n\n return ret;\n}\n\n/*\n Check the typename\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeCheck(const char *c, swig_type_info *ty) {\n swig_type_info *s;\n if (!ty) return 0; /* Void pointer */\n s = ty->next; /* First element always just a name */\n do {\n if (strcmp(s->name,c) == 0) {\n if (s == ty->next) return s;\n /* Move s to the top of the linked list */\n s->prev->next = s->next;\n if (s->next) {\n s->next->prev = s->prev;\n }\n /* Insert s as second element in the list */\n s->next = ty->next;\n if (ty->next) ty->next->prev = s;\n ty->next = s;\n s->prev = ty;\n return s;\n }\n s = s->next;\n } while (s && (s != ty->next));\n return 0;\n}\n\n/*\n Cast a pointer up an inheritance hierarchy\n*/\nSWIGRUNTIMEINLINE void *\nSWIG_TypeCast(swig_type_info *ty, void *ptr) {\n return ((!ty) || (!ty->converter)) ? ptr : (*ty->converter)(ptr);\n}\n\n/* \n Dynamic pointer casting. 
Down an inheritance hierarchy\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeDynamicCast(swig_type_info *ty, void **ptr) {\n swig_type_info *lastty = ty;\n if (!ty || !ty->dcast) return ty;\n while (ty && (ty->dcast)) {\n ty = (*ty->dcast)(ptr);\n if (ty) lastty = ty;\n }\n return lastty;\n}\n\n/*\n Return the name associated with this type\n*/\nSWIGRUNTIMEINLINE const char *\nSWIG_TypeName(const swig_type_info *ty) {\n return ty->name;\n}\n\n/*\n Return the pretty name associated with this type,\n that is an unmangled type name in a form presentable to the user.\n*/\nSWIGRUNTIME const char *\nSWIG_TypePrettyName(const swig_type_info *type) {\n /* The \"str\" field contains the equivalent pretty names of the\n type, separated by vertical-bar characters. We choose\n to print the last name, as it is often (?) the most\n specific. */\n if (type->str != NULL) {\n const char *last_name = type->str;\n const char *s;\n for (s = type->str; *s; s++)\n if (*s == '|') last_name = s+1;\n return last_name;\n }\n else\n return type->name;\n}\n\n/*\n Search for a swig_type_info structure\n*/\nSWIGRUNTIME swig_type_info *\nSWIG_TypeQueryTL(swig_type_info *tl, const char *name) {\n swig_type_info *ty = tl;\n while (ty) {\n if (ty->str && (SWIG_TypeEquiv(ty->str,name))) return ty;\n if (ty->name && (strcmp(name,ty->name) == 0)) return ty;\n ty = ty->prev;\n }\n return 0;\n}\n\n/* \n Set the clientdata field for a type\n*/\nSWIGRUNTIME void\nSWIG_TypeClientDataTL(swig_type_info *tl, swig_type_info *ti, void *clientdata) {\n swig_type_info *tc, *equiv;\n if (ti->clientdata) return;\n /* if (ti->clientdata == clientdata) return; */\n ti->clientdata = clientdata;\n equiv = ti->next;\n while (equiv) {\n if (!equiv->converter) {\n tc = tl;\n while (tc) {\n if ((strcmp(tc->name, equiv->name) == 0))\n SWIG_TypeClientDataTL(tl,tc,clientdata);\n tc = tc->prev;\n }\n }\n equiv = equiv->next;\n }\n}\n\n/* \n Pack binary data into a string\n*/\nSWIGRUNTIME char *\nSWIG_PackData(char *c, void *ptr, 
size_t sz) {\n static char hex[17] = \"0123456789abcdef\";\n unsigned char *u = (unsigned char *) ptr;\n const unsigned char *eu = u + sz;\n register unsigned char uu;\n for (; u != eu; ++u) {\n uu = *u;\n *(c++) = hex[(uu & 0xf0) >> 4];\n *(c++) = hex[uu & 0xf];\n }\n return c;\n}\n\n/* \n Unpack binary data from a string\n*/\nSWIGRUNTIME const char *\nSWIG_UnpackData(const char *c, void *ptr, size_t sz) {\n register unsigned char *u = (unsigned char *) ptr;\n register const unsigned char *eu = u + sz;\n for (; u != eu; ++u) {\n register int d = *(c++);\n register unsigned char uu = 0;\n if ((d >= '0') && (d <= '9'))\n uu = ((d - '0') << 4);\n else if ((d >= 'a') && (d <= 'f'))\n uu = ((d - ('a'-10)) << 4);\n else \n return (char *) 0;\n d = *(c++);\n if ((d >= '0') && (d <= '9'))\n uu |= (d - '0');\n else if ((d >= 'a') && (d <= 'f'))\n uu |= (d - ('a'-10));\n else \n return (char *) 0;\n *u = uu;\n }\n return c;\n}\n\n/*\n This function will propagate the clientdata field of type to any new\n swig_type_info structures that have been added into the list of\n equivalent types. 
It is like calling SWIG_TypeClientData(type,\n clientdata) a second time.\n*/\nSWIGRUNTIME void\nSWIG_PropagateClientDataTL(swig_type_info *tl, swig_type_info *type) {\n swig_type_info *equiv = type->next;\n swig_type_info *tc;\n if (!type->clientdata) return;\n while (equiv) {\n if (!equiv->converter) {\n tc = tl;\n while (tc) {\n if ((strcmp(tc->name, equiv->name) == 0) && !tc->clientdata)\n SWIG_TypeClientDataTL(tl,tc, type->clientdata);\n tc = tc->prev;\n }\n }\n equiv = equiv->next;\n }\n}\n\n/* \n Pack 'void *' into a string buffer.\n*/\nSWIGRUNTIME char *\nSWIG_PackVoidPtr(char *buff, void *ptr, const char *name, size_t bsz) {\n char *r = buff;\n if ((2*sizeof(void *) + 2) > bsz) return 0;\n *(r++) = '_';\n r = SWIG_PackData(r,&ptr,sizeof(void *));\n if (strlen(name) + 1 > (bsz - (r - buff))) return 0;\n strcpy(r,name);\n return buff;\n}\n\nSWIGRUNTIME const char *\nSWIG_UnpackVoidPtr(const char *c, void **ptr, const char *name) {\n if (*c != '_') {\n if (strcmp(c,\"NULL\") == 0) {\n *ptr = (void *) 0;\n return name;\n } else {\n return 0;\n }\n }\n return SWIG_UnpackData(++c,ptr,sizeof(void *));\n}\n\nSWIGRUNTIME char *\nSWIG_PackDataName(char *buff, void *ptr, size_t sz, const char *name, size_t bsz) {\n char *r = buff;\n size_t lname = (name ? 
strlen(name) : 0);\n if ((2*sz + 2 + lname) > bsz) return 0;\n *(r++) = '_';\n r = SWIG_PackData(r,ptr,sz);\n if (lname) {\n strncpy(r,name,lname+1);\n } else {\n *r = 0;\n }\n return buff;\n}\n\nSWIGRUNTIME const char *\nSWIG_UnpackDataName(const char *c, void *ptr, size_t sz, const char *name) {\n if (*c != '_') {\n if (strcmp(c,\"NULL\") == 0) {\n memset(ptr,0,sz);\n return name;\n } else {\n return 0;\n }\n }\n return SWIG_UnpackData(++c,ptr,sz);\n}\n\n#ifdef __cplusplus\n}\n#endif\n\n/***********************************************************************\n * pyrun.swg\n *\n * This file contains the runtime support for Python modules\n * and includes code for managing global variables and pointer\n * type checking.\n *\n * Author : David Beazley (beazley@cs.uchicago.edu)\n ************************************************************************/\n\n/* Common SWIG API */\n#define SWIG_ConvertPtr(obj, pp, type, flags) SWIG_Python_ConvertPtr(obj, pp, type, flags)\n#define SWIG_NewPointerObj(p, type, flags) SWIG_Python_NewPointerObj(p, type, flags)\n#define SWIG_MustGetPtr(p, type, argnum, flags) SWIG_Python_MustGetPtr(p, type, argnum, flags)\n \n\n/* Python-specific SWIG API */\n#define SWIG_ConvertPacked(obj, ptr, sz, ty, flags) SWIG_Python_ConvertPacked(obj, ptr, sz, ty, flags)\n#define SWIG_NewPackedObj(ptr, sz, type) SWIG_Python_NewPackedObj(ptr, sz, type)\n\n\n/* -----------------------------------------------------------------------------\n * Pointer declarations\n * ----------------------------------------------------------------------------- */\n/*\n Use SWIG_NO_COBJECT_TYPES to force the use of strings to represent\n C/C++ pointers in the python side. 
Very useful for debugging, but\n not always safe.\n*/\n#if !defined(SWIG_NO_COBJECT_TYPES) && !defined(SWIG_COBJECT_TYPES)\n# define SWIG_COBJECT_TYPES\n#endif\n\n/* Flags for pointer conversion */\n#define SWIG_POINTER_EXCEPTION 0x1\n#define SWIG_POINTER_DISOWN 0x2\n\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n/* -----------------------------------------------------------------------------\n * Create a new pointer string \n * ----------------------------------------------------------------------------- */\n\n#ifndef SWIG_BUFFER_SIZE\n#define SWIG_BUFFER_SIZE 1024\n#endif\n\n#if defined(SWIG_COBJECT_TYPES)\n#if !defined(SWIG_COBJECT_PYTHON)\n/* -----------------------------------------------------------------------------\n * Implements a simple Swig Object type, and use it instead of PyCObject\n * ----------------------------------------------------------------------------- */\n\ntypedef struct {\n PyObject_HEAD\n void *ptr;\n const char *desc;\n} PySwigObject;\n\n/* Declarations for objects of type PySwigObject */\n\nSWIGRUNTIME int\nPySwigObject_print(PySwigObject *v, FILE *fp, int flags)\n{\n char result[SWIG_BUFFER_SIZE];\n if (SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result))) {\n fputs(\"\", fp);\n return 0; \n } else {\n return 1; \n }\n}\n \nSWIGRUNTIME PyObject *\nPySwigObject_repr(PySwigObject *v)\n{\n char result[SWIG_BUFFER_SIZE];\n return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?\n PyString_FromFormat(\"\", result) : 0;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_str(PySwigObject *v)\n{\n char result[SWIG_BUFFER_SIZE];\n return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?\n PyString_FromString(result) : 0;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_long(PySwigObject *v)\n{\n return PyLong_FromUnsignedLong((unsigned long) v->ptr);\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_oct(PySwigObject *v)\n{\n char buf[100];\n unsigned long x = (unsigned long)v->ptr;\n if (x == 0)\n strcpy(buf, \"0\");\n else\n 
PyOS_snprintf(buf, sizeof(buf), \"0%lo\", x);\n return PyString_FromString(buf);\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_hex(PySwigObject *v)\n{\n char buf[100];\n PyOS_snprintf(buf, sizeof(buf), \"0x%lx\", (unsigned long)v->ptr);\n return PyString_FromString(buf);\n}\n\nSWIGRUNTIME int\nPySwigObject_compare(PySwigObject *v, PySwigObject *w)\n{\n int c = strcmp(v->desc, w->desc);\n if (c) {\n return c;\n } else {\n void *i = v->ptr;\n void *j = w->ptr;\n return (i < j) ? -1 : (i > j) ? 1 : 0;\n }\n}\n\nSWIGRUNTIME void\nPySwigObject_dealloc(PySwigObject *self)\n{\n PyObject_DEL(self);\n}\n\nSWIGRUNTIME PyTypeObject*\nPySwigObject_GetType() {\n static char PySwigObject_Type__doc__[] = \n \"Swig object carries a C/C++ instance pointer\";\n \n static PyNumberMethods PySwigObject_as_number = {\n (binaryfunc)0, /*nb_add*/\n (binaryfunc)0, /*nb_subtract*/\n (binaryfunc)0, /*nb_multiply*/\n (binaryfunc)0, /*nb_divide*/\n (binaryfunc)0, /*nb_remainder*/\n (binaryfunc)0, /*nb_divmod*/\n (ternaryfunc)0,/*nb_power*/\n (unaryfunc)0, /*nb_negative*/\n (unaryfunc)0, /*nb_positive*/\n (unaryfunc)0, /*nb_absolute*/\n (inquiry)0, /*nb_nonzero*/\n 0,\t\t /*nb_invert*/\n 0,\t\t /*nb_lshift*/\n 0,\t\t /*nb_rshift*/\n 0,\t\t /*nb_and*/\n 0,\t\t /*nb_xor*/\n 0,\t\t /*nb_or*/\n (coercion)0, /*nb_coerce*/\n (unaryfunc)PySwigObject_long, /*nb_int*/\n (unaryfunc)PySwigObject_long, /*nb_long*/\n (unaryfunc)0, /*nb_float*/\n (unaryfunc)PySwigObject_oct, /*nb_oct*/\n (unaryfunc)PySwigObject_hex, /*nb_hex*/\n#if PY_VERSION_HEX >= 0x02000000\n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_true_divide */ \n#endif\n };\n\n static PyTypeObject PySwigObject_Type = {\n PyObject_HEAD_INIT(&PyType_Type)\n 0,\t\t\t\t\t/*ob_size*/\n \"PySwigObject\",\t\t\t/*tp_name*/\n sizeof(PySwigObject),\t\t/*tp_basicsize*/\n 0,\t\t\t\t\t/*tp_itemsize*/\n /* methods */\n (destructor)PySwigObject_dealloc,\t/*tp_dealloc*/\n (printfunc)PySwigObject_print,\t/*tp_print*/\n 
(getattrfunc)0,\t\t\t/*tp_getattr*/\n (setattrfunc)0,\t\t\t/*tp_setattr*/\n (cmpfunc)PySwigObject_compare,\t/*tp_compare*/\n (reprfunc)PySwigObject_repr,\t/*tp_repr*/\n &PySwigObject_as_number,\t /*tp_as_number*/\n 0,\t\t\t\t\t/*tp_as_sequence*/\n 0,\t\t\t\t\t/*tp_as_mapping*/\n (hashfunc)0,\t\t\t/*tp_hash*/\n (ternaryfunc)0,\t\t\t/*tp_call*/\n (reprfunc)PySwigObject_str,\t\t/*tp_str*/\n /* Space for future expansion */\n 0L,0L,0L,0L,\n PySwigObject_Type__doc__, \t /* Documentation string */\n#if PY_VERSION_HEX >= 0x02000000\n 0, /* tp_traverse */\n 0, /* tp_clear */\n#endif\n#if PY_VERSION_HEX >= 0x02010000\n 0, /* tp_richcompare */\n 0, /* tp_weaklistoffset */\n#endif\n#if PY_VERSION_HEX >= 0x02020000\n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */\n#endif\n#if PY_VERSION_HEX >= 0x02030000\n 0, /* tp_del */\n#endif\n#ifdef COUNT_ALLOCS\n 0,0,0,0 /* tp_alloc -> tp_next */\n#endif\n };\n\n return &PySwigObject_Type;\n}\n\nSWIGRUNTIME PyObject *\nPySwigObject_FromVoidPtrAndDesc(void *ptr, const char *desc)\n{\n PySwigObject *self = PyObject_NEW(PySwigObject, PySwigObject_GetType());\n if (self == NULL) return NULL;\n self->ptr = ptr;\n self->desc = desc;\n return (PyObject *)self;\n}\n\nSWIGRUNTIMEINLINE void *\nPySwigObject_AsVoidPtr(PyObject *self)\n{\n return ((PySwigObject *)self)->ptr;\n}\n\nSWIGRUNTIMEINLINE const char *\nPySwigObject_GetDesc(PyObject *self)\n{\n return ((PySwigObject *)self)->desc;\n}\n\nSWIGRUNTIMEINLINE int\nPySwigObject_Check(PyObject *op) {\n return ((op)->ob_type == PySwigObject_GetType()) \n || (strcmp((op)->ob_type->tp_name,\"PySwigObject\") == 0);\n}\n\n/* -----------------------------------------------------------------------------\n * Implements a simple Swig Packed type, and use it instead of string\n * ----------------------------------------------------------------------------- */\n\ntypedef struct {\n PyObject_HEAD\n void *pack;\n const char *desc;\n size_t size;\n} PySwigPacked;\n\nSWIGRUNTIME 
int\nPySwigPacked_print(PySwigPacked *v, FILE *fp, int flags)\n{\n char result[SWIG_BUFFER_SIZE];\n fputs(\"pack, v->size, 0, sizeof(result))) {\n fputs(\"at \", fp); \n fputs(result, fp); \n }\n fputs(v->desc,fp); \n fputs(\">\", fp);\n return 0; \n}\n \nSWIGRUNTIME PyObject *\nPySwigPacked_repr(PySwigPacked *v)\n{\n char result[SWIG_BUFFER_SIZE];\n if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {\n return PyString_FromFormat(\"\", result, v->desc);\n } else {\n return PyString_FromFormat(\"\", v->desc);\n } \n}\n\nSWIGRUNTIME PyObject *\nPySwigPacked_str(PySwigPacked *v)\n{\n char result[SWIG_BUFFER_SIZE];\n if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))){\n return PyString_FromFormat(\"%s%s\", result, v->desc);\n } else {\n return PyString_FromFormat(\"%s\", v->desc);\n } \n}\n\nSWIGRUNTIME int\nPySwigPacked_compare(PySwigPacked *v, PySwigPacked *w)\n{\n int c = strcmp(v->desc, w->desc);\n if (c) {\n return c;\n } else {\n size_t i = v->size;\n size_t j = w->size;\n int s = (i < j) ? -1 : (i > j) ? 1 : 0;\n return s ? 
s : strncmp((char *)v->pack, (char *)w->pack, 2*v->size);\n }\n}\n\nSWIGRUNTIME void\nPySwigPacked_dealloc(PySwigPacked *self)\n{\n free(self->pack);\n PyObject_DEL(self);\n}\n\nSWIGRUNTIME PyTypeObject*\nPySwigPacked_GetType() {\n static char PySwigPacked_Type__doc__[] = \n \"Swig object carries a C/C++ instance pointer\";\n \n static PyTypeObject PySwigPacked_Type = {\n PyObject_HEAD_INIT(&PyType_Type)\n 0,\t\t\t\t\t/*ob_size*/\n \"PySwigPacked\",\t\t\t/*tp_name*/\n sizeof(PySwigPacked),\t\t/*tp_basicsize*/\n 0,\t\t\t\t\t/*tp_itemsize*/\n /* methods */\n (destructor)PySwigPacked_dealloc,\t/*tp_dealloc*/\n (printfunc)PySwigPacked_print,\t/*tp_print*/\n (getattrfunc)0,\t\t\t/*tp_getattr*/\n (setattrfunc)0,\t\t\t/*tp_setattr*/\n (cmpfunc)PySwigPacked_compare,\t/*tp_compare*/\n (reprfunc)PySwigPacked_repr,\t/*tp_repr*/\n 0,\t /*tp_as_number*/\n 0,\t\t\t\t\t/*tp_as_sequence*/\n 0,\t\t\t\t\t/*tp_as_mapping*/\n (hashfunc)0,\t\t\t/*tp_hash*/\n (ternaryfunc)0,\t\t\t/*tp_call*/\n (reprfunc)PySwigPacked_str,\t\t/*tp_str*/\n /* Space for future expansion */\n 0L,0L,0L,0L,\n PySwigPacked_Type__doc__, \t /* Documentation string */\n#if PY_VERSION_HEX >= 0x02000000\n 0, /* tp_traverse */\n 0, /* tp_clear */\n#endif\n#if PY_VERSION_HEX >= 0x02010000\n 0, /* tp_richcompare */\n 0, /* tp_weaklistoffset */\n#endif\n#if PY_VERSION_HEX >= 0x02020000 \n 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */\n#endif\n#if PY_VERSION_HEX >= 0x02030000\n 0, /* tp_del */\n#endif\n#ifdef COUNT_ALLOCS\n 0,0,0,0 /* tp_alloc -> tp_next */\n#endif\n };\n\n return &PySwigPacked_Type;\n}\n\nSWIGRUNTIME PyObject *\nPySwigPacked_FromDataAndDesc(void *ptr, size_t size, const char *desc)\n{\n PySwigPacked *self = PyObject_NEW(PySwigPacked, PySwigPacked_GetType());\n if (self == NULL) {\n return NULL;\n } else {\n void *pack = malloc(size);\n memcpy(pack, ptr, size);\n self->pack = pack;\n self->desc = desc;\n self->size = size;\n return (PyObject *) self;\n }\n}\n\nSWIGRUNTIMEINLINE 
const char *\nPySwigPacked_UnpackData(PyObject *obj, void *ptr, size_t size)\n{\n PySwigPacked *self = (PySwigPacked *)obj;\n if (self->size != size) return 0;\n memcpy(ptr, self->pack, size);\n return self->desc;\n}\n\nSWIGRUNTIMEINLINE const char *\nPySwigPacked_GetDesc(PyObject *self)\n{\n return ((PySwigPacked *)self)->desc;\n}\n\nSWIGRUNTIMEINLINE int\nPySwigPacked_Check(PyObject *op) {\n return ((op)->ob_type == PySwigPacked_GetType()) \n || (strcmp((op)->ob_type->tp_name,\"PySwigPacked\") == 0);\n}\n\n#else\n/* -----------------------------------------------------------------------------\n * Use the old Python PyCObject instead of PySwigObject\n * ----------------------------------------------------------------------------- */\n\n#define PySwigObject_GetDesc(obj)\t PyCObject_GetDesc(obj)\n#define PySwigObject_Check(obj)\t PyCObject_Check(obj)\n#define PySwigObject_AsVoidPtr(obj)\t PyCObject_AsVoidPtr(obj)\n#define PySwigObject_FromVoidPtrAndDesc(p, d) PyCObject_FromVoidPtrAndDesc(p, d, NULL)\n\n#endif\n\n#endif\n\n/* -----------------------------------------------------------------------------\n * errors manipulation\n * ----------------------------------------------------------------------------- */\n\nSWIGRUNTIME void\nSWIG_Python_TypeError(const char *type, PyObject *obj)\n{\n if (type) {\n#if defined(SWIG_COBJECT_TYPES)\n if (PySwigObject_Check(obj)) {\n const char *otype = (const char *) PySwigObject_GetDesc(obj);\n if (otype) {\n\tPyErr_Format(PyExc_TypeError, \"a '%s' is expected, 'PySwigObject(%s)' is received\",\n\t\t type, otype);\n\treturn;\n }\n } else \n#endif \n {\n const char *otype = (obj ? obj->ob_type->tp_name : 0); \n if (otype) {\n\tPyObject *str = PyObject_Str(obj);\n\tconst char *cstr = str ? 
PyString_AsString(str) : 0;\n\tif (cstr) {\n\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s(%s)' is received\",\n\t\t type, otype, cstr);\n\t} else {\n\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s' is received\",\n\t\t type, otype);\n\t}\n\tPy_DECREF(str);\n\treturn;\n }\n } \n PyErr_Format(PyExc_TypeError, \"a '%s' is expected\", type);\n } else {\n PyErr_Format(PyExc_TypeError, \"unexpected type is received\");\n }\n}\n\nSWIGRUNTIMEINLINE void\nSWIG_Python_NullRef(const char *type)\n{\n if (type) {\n PyErr_Format(PyExc_TypeError, \"null reference of type '%s' was received\",type);\n } else {\n PyErr_Format(PyExc_TypeError, \"null reference was received\");\n }\n}\n\nSWIGRUNTIME int\nSWIG_Python_AddErrMesg(const char* mesg, int infront)\n{\n if (PyErr_Occurred()) {\n PyObject *type = 0;\n PyObject *value = 0;\n PyObject *traceback = 0;\n PyErr_Fetch(&type, &value, &traceback);\n if (value) {\n PyObject *old_str = PyObject_Str(value);\n Py_XINCREF(type);\n PyErr_Clear();\n if (infront) {\n\tPyErr_Format(type, \"%s %s\", mesg, PyString_AsString(old_str));\n } else {\n\tPyErr_Format(type, \"%s %s\", PyString_AsString(old_str), mesg);\n }\n Py_DECREF(old_str);\n }\n return 1;\n } else {\n return 0;\n }\n}\n\nSWIGRUNTIME int\nSWIG_Python_ArgFail(int argnum)\n{\n if (PyErr_Occurred()) {\n /* add information about failing argument */\n char mesg[256];\n sprintf(mesg, \"argument number %d:\", argnum);\n return SWIG_Python_AddErrMesg(mesg, 1);\n } else {\n return 0;\n }\n}\n\n\n/* -----------------------------------------------------------------------------\n * pointers/data manipulation\n * ----------------------------------------------------------------------------- */\n\n/* Convert a pointer value */\nSWIGRUNTIME int\nSWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {\n swig_type_info *tc;\n const char *c = 0;\n static PyObject *SWIG_this = 0;\n int newref = 0;\n PyObject *pyobj = 0;\n void *vptr;\n \n if (!obj) 
return 0;\n if (obj == Py_None) {\n *ptr = 0;\n return 0;\n }\n\n#ifdef SWIG_COBJECT_TYPES\n if (!(PySwigObject_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PySwigObject_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n vptr = PySwigObject_AsVoidPtr(obj);\n c = (const char *) PySwigObject_GetDesc(obj);\n if (newref) { Py_DECREF(obj); }\n goto type_check;\n#else\n if (!(PyString_Check(obj))) {\n if (!SWIG_this)\n SWIG_this = PyString_FromString(\"this\");\n pyobj = obj;\n obj = PyObject_GetAttr(obj,SWIG_this);\n newref = 1;\n if (!obj) goto type_error;\n if (!PyString_Check(obj)) {\n Py_DECREF(obj);\n goto type_error;\n }\n } \n c = PyString_AS_STRING(obj);\n /* Pointer values must start with leading underscore */\n c = SWIG_UnpackVoidPtr(c, &vptr, ty->name);\n if (newref) { Py_DECREF(obj); }\n if (!c) goto type_error;\n#endif\n\ntype_check:\n\n if (ty) {\n tc = SWIG_TypeCheck(c,ty);\n if (!tc) goto type_error;\n *ptr = SWIG_TypeCast(tc,vptr);\n }\n\n if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {\n PyObject_SetAttrString(pyobj,(char*)\"thisown\",Py_False);\n }\n return 0;\n\ntype_error:\n PyErr_Clear();\n if (pyobj && !obj) { \n obj = pyobj;\n if (PyCFunction_Check(obj)) {\n /* here we get the method pointer for callbacks */\n char *doc = (((PyCFunctionObject *)obj) -> m_ml -> ml_doc);\n c = doc ? 
strstr(doc, \"swig_ptr: \") : 0;\n if (c) {\n\tc = SWIG_UnpackVoidPtr(c + 10, &vptr, ty->name);\n\tif (!c) goto type_error;\n\tgoto type_check;\n }\n }\n }\n if (flags & SWIG_POINTER_EXCEPTION) {\n if (ty) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n } else {\n SWIG_Python_TypeError(\"C/C++ pointer\", obj);\n }\n }\n return -1;\n}\n\n/* Convert a pointer value, signal an exception on a type mismatch */\nSWIGRUNTIME void *\nSWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {\n void *result;\n if (SWIG_Python_ConvertPtr(obj, &result, ty, flags) == -1) {\n PyErr_Clear();\n if (flags & SWIG_POINTER_EXCEPTION) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n SWIG_Python_ArgFail(argnum);\n }\n }\n return result;\n}\n\n/* Convert a packed value value */\nSWIGRUNTIME int\nSWIG_Python_ConvertPacked(PyObject *obj, void *ptr, size_t sz, swig_type_info *ty, int flags) {\n swig_type_info *tc;\n const char *c = 0;\n\n#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)\n c = PySwigPacked_UnpackData(obj, ptr, sz);\n#else\n if ((!obj) || (!PyString_Check(obj))) goto type_error;\n c = PyString_AS_STRING(obj);\n /* Pointer values must start with leading underscore */\n c = SWIG_UnpackDataName(c, ptr, sz, ty->name);\n#endif\n if (!c) goto type_error;\n if (ty) {\n tc = SWIG_TypeCheck(c,ty);\n if (!tc) goto type_error;\n }\n return 0;\n\ntype_error:\n PyErr_Clear();\n if (flags & SWIG_POINTER_EXCEPTION) {\n if (ty) {\n SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);\n } else {\n SWIG_Python_TypeError(\"C/C++ packed data\", obj);\n }\n }\n return -1;\n} \n\n/* Create a new array object */\nSWIGRUNTIME PyObject *\nSWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {\n PyObject *robj = 0;\n if (!ptr) {\n Py_INCREF(Py_None);\n return Py_None;\n }\n#ifdef SWIG_COBJECT_TYPES\n robj = PySwigObject_FromVoidPtrAndDesc((void *) ptr, (char *)type->name);\n#else\n {\n char result[SWIG_BUFFER_SIZE];\n robj = 
SWIG_PackVoidPtr(result, ptr, type->name, sizeof(result)) ?\n PyString_FromString(result) : 0;\n }\n#endif\n if (!robj || (robj == Py_None)) return robj;\n if (type->clientdata) {\n PyObject *inst;\n PyObject *args = Py_BuildValue((char*)\"(O)\", robj);\n Py_DECREF(robj);\n inst = PyObject_CallObject((PyObject *) type->clientdata, args);\n Py_DECREF(args);\n if (inst) {\n if (own) {\n PyObject_SetAttrString(inst,(char*)\"thisown\",Py_True);\n }\n robj = inst;\n }\n }\n return robj;\n}\n\nSWIGRUNTIME PyObject *\nSWIG_Python_NewPackedObj(void *ptr, size_t sz, swig_type_info *type) {\n PyObject *robj = 0;\n if (!ptr) {\n Py_INCREF(Py_None);\n return Py_None;\n }\n#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)\n robj = PySwigPacked_FromDataAndDesc((void *) ptr, sz, (char *)type->name);\n#else\n {\n char result[SWIG_BUFFER_SIZE];\n robj = SWIG_PackDataName(result, ptr, sz, type->name, sizeof(result)) ?\n PyString_FromString(result) : 0;\n }\n#endif\n return robj;\n}\n\n/* -----------------------------------------------------------------------------*\n * Get type list \n * -----------------------------------------------------------------------------*/\n\n#ifdef SWIG_LINK_RUNTIME\nvoid *SWIG_ReturnGlobalTypeList(void *);\n#endif\n\nSWIGRUNTIME swig_type_info **\nSWIG_Python_GetTypeListHandle() {\n static void *type_pointer = (void *)0;\n /* first check if module already created */\n if (!type_pointer) {\n#ifdef SWIG_LINK_RUNTIME\n type_pointer = SWIG_ReturnGlobalTypeList((void *)0);\n#else\n type_pointer = PyCObject_Import((char*)\"swig_runtime_data\" SWIG_RUNTIME_VERSION,\n\t\t\t\t (char*)\"type_pointer\" SWIG_TYPE_TABLE_NAME);\n if (PyErr_Occurred()) {\n PyErr_Clear();\n type_pointer = (void *)0;\n }\n }\n#endif\n return (swig_type_info **) type_pointer;\n}\n\n/*\n Search for a swig_type_info structure\n */\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_Python_GetTypeList() {\n swig_type_info **tlh = SWIG_Python_GetTypeListHandle();\n return tlh ? 
*tlh : (swig_type_info*)0;\n}\n\n#define SWIG_Runtime_GetTypeList SWIG_Python_GetTypeList \n\n#ifdef __cplusplus\n}\n#endif\n\n/* -----------------------------------------------------------------------------*\n Standard SWIG API for use inside user code.\n \n You need to include in your code as follow:\n\n#include // or using your favorite language \n#include \n#include // or using your favorite language \n#include \n \n * -----------------------------------------------------------------------------*/\n\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_Runtime_TypeQuery(const char *name) {\n swig_type_info *tl = SWIG_Runtime_GetTypeList();\n return SWIG_TypeQueryTL(tl, name);\n}\n\nSWIGRUNTIMEINLINE swig_type_info *\nSWIG_Runtime_TypeRegister(swig_type_info *ti) {\n swig_type_info *tl = SWIG_Runtime_GetTypeList();\n return SWIG_TypeRegisterTL(&tl, ti);\n}\n\nSWIGRUNTIMEINLINE void\nSWIG_Runtime_TypeClientData(swig_type_info *ti, void *clientdata) {\n swig_type_info *tl = SWIG_Runtime_GetTypeList();\n SWIG_TypeClientDataTL(tl, ti, clientdata);\n}\n\nSWIGRUNTIMEINLINE void\nSWIG_Runtime_PropagateClientData(swig_type_info *type) {\n swig_type_info *tl = SWIG_Runtime_GetTypeList();\n SWIG_PropagateClientDataTL(tl, type);\n}\n\n#define SWIG_GetTypeList() SWIG_Runtime_GetTypeList()\n#define SWIG_TypeQuery(name) SWIG_Runtime_TypeQuery(name)\n#define SWIG_TypeRegister(ti) SWIG_Runtime_TypeRegister(ti)\n#define SWIG_TypeClientData(ti, cd) SWIG_Runtime_TypeClientData(ti, cd)\n#define SWIG_PropagateClientData(ti) SWIG_Runtime_PropagateClientData(ti)\n\n\"\"\"\n", "methods": [], "methods_before": [], "changed_methods": [], "nloc": 2945, "complexity": 0, "token_count": 9, "diff_parsed": { "added": [ "\"\"\"", "", "######################################################################", "# This is for SWIG-1.3.x where x >= 25.", "# SWIG_RUNTIME_VERSION == \"2\"", "", "# All this does is to include the contents of the file generated by", "# this command:", "# swig -python 
-external-runtime", "swigptr2_code_v2 = \"\"\"", "/* ----------------------------------------------------------------------------", " * This file was automatically generated by SWIG (http://www.swig.org).", " * Version 1.3.25", " *", " * This file is not intended to be easily readable and contains a number of", " * coding conventions designed to improve portability and efficiency. Do not make", " * changes to this file unless you know what you are doing--modify the SWIG", " * interface file instead.", " * ----------------------------------------------------------------------------- */", "", "/***********************************************************************", " *", " * This section contains generic SWIG labels for method/variable", " * declarations/attributes, and other compiler dependent labels.", " *", " ************************************************************************/", "", "/*", " SWIGTEMPLATEDISAMBIGUATOR is needed when wrapping template calls", " (cwrap.c:Swig_cfunction_call/Swig_cmethod_call), as in", "", " result = nspace::template function(arg1);", " result = arg1->template method(arg2);", "", " SWIGTEMPLATEDISAMBIGUATOR is compiler dependent (common.swg),", " - SUN Studio requires 'template',", " - gcc-3.4 forbids the use of 'template'.", " - gcc-3.2.3 produces internal errors if you use 'template'", "*/", "#ifndef SWIGTEMPLATEDISAMBIGUATOR", "# if defined(__SUNPRO_CC)", "# define SWIGTEMPLATEDISAMBIGUATOR template", "# else", "# define SWIGTEMPLATEDISAMBIGUATOR", "# endif", "#endif", "", "/* inline attribute */", "#ifndef SWIGINLINE", "# if defined(__cplusplus) || (defined(__GNUC__) && !defined(__STRICT_ANSI__))", "# define SWIGINLINE inline", "# else", "# define SWIGINLINE", "# endif", "#endif", "", "/* attritbute passed for some compilers to avoid 'unused' warnings */", "#ifndef SWIGUNUSED", "# if defined(__GNUC__) || defined(__ICC)", "# define SWIGUNUSED __attribute__ ((unused))", "# else", "# define SWIGUNUSED", "# endif", "#endif", "", 
"/* internal SWIG method */", "#ifndef SWIGINTERN", "# define SWIGINTERN static SWIGUNUSED", "#endif", "", "/* internal inline SWIG method */", "#ifndef SWIGINTERNINLINE", "# define SWIGINTERNINLINE SWIGINTERN SWIGINLINE", "#endif", "", "/* how we export a method such that it can go in to a shared or dll library */", "#ifndef SWIGEXPORT", "# if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)", "# if defined(_MSC_VER) || defined(__GNUC__)", "# if defined(STATIC_LINKED)", "# define SWIGEXPORT(a) a", "# else", "# define SWIGEXPORT(a) __declspec(dllexport) a", "# endif", "# else", "# if defined(__BORLANDC__)", "# define SWIGEXPORT(a) a _export", "# else", "# define SWIGEXPORT(a) a", "# endif", "# endif", "# else", "# define SWIGEXPORT(a) a", "# endif", "#endif", "", "/***********************************************************************", " * swigrun.swg", " *", " * This file contains generic CAPI SWIG runtime support for pointer", " * type checking.", " *", " ************************************************************************/", "", "/* This should only be incremented when either the layout of swig_type_info changes,", " or for whatever reason, the runtime changes incompatibly */", "#define SWIG_RUNTIME_VERSION \"2\"", "", "/* define SWIG_TYPE_TABLE_NAME as \"SWIG_TYPE_TABLE\" */", "#ifdef SWIG_TYPE_TABLE", "# define SWIG_QUOTE_STRING(x) #x", "# define SWIG_EXPAND_AND_QUOTE_STRING(x) SWIG_QUOTE_STRING(x)", "# define SWIG_TYPE_TABLE_NAME SWIG_EXPAND_AND_QUOTE_STRING(SWIG_TYPE_TABLE)", "#else", "# define SWIG_TYPE_TABLE_NAME", "#endif", "", "/*", " You can use the SWIGRUNTIME and SWIGRUNTIMEINLINE macros for", " creating a static or dynamic library from the swig runtime code.", " In 99.9% of the cases, swig just needs to declare them as 'static'.", "", " But only do this if is strictly necessary, ie, if you have problems", " with your compiler or so.", "*/", "", "#ifndef SWIGRUNTIME", "# define SWIGRUNTIME SWIGINTERN", "#endif", "", "#ifndef 
SWIGRUNTIMEINLINE", "# define SWIGRUNTIMEINLINE SWIGRUNTIME SWIGINLINE", "#endif", "", "#include ", "", "#ifdef __cplusplus", "extern \"C\" {", "#endif", "", "typedef void *(*swig_converter_func)(void *);", "typedef struct swig_type_info *(*swig_dycast_func)(void **);", "", "/* Structure to store inforomation on one type */", "typedef struct swig_type_info {", " const char *name;\t\t\t/* mangled name of this type */", " const char *str;\t\t\t/* human readable name of this type */", " swig_dycast_func dcast;\t\t/* dynamic cast function down a hierarchy */", " struct swig_cast_info *cast;\t\t\t/* linked list of types that can cast into this type */", " void *clientdata;\t\t/* language specific type data */", "} swig_type_info;", "", "/* Structure to store a type and conversion function used for casting */", "typedef struct swig_cast_info {", " swig_type_info *type;\t\t\t/* pointer to type that is equivalent to this type */", " swig_converter_func converter;\t\t/* function to cast the void pointers */", " struct swig_cast_info *next;\t\t\t/* pointer to next cast in linked list */", " struct swig_cast_info *prev;\t\t\t/* pointer to the previous cast */", "} swig_cast_info;", "", "/* Structure used to store module information", " * Each module generates one structure like this, and the runtime collects", " * all of these structures and stores them in a circularly linked list.*/", "typedef struct swig_module_info {", " swig_type_info **types;\t\t/* Array of pointers to swig_type_info structures that are in this module */", " size_t size;\t\t /* Number of types in this module */", " struct swig_module_info *next;\t\t/* Pointer to next element in circularly linked list */", " swig_type_info **type_initial;\t/* Array of initially generated type structures */", " swig_cast_info **cast_initial;\t/* Array of initially generated casting structures */", " void *clientdata;\t\t/* Language specific module data */", "} swig_module_info;", "", "", "/*", " Compare two type names 
skipping the space characters, therefore", " \"char*\" == \"char *\" and \"Class\" == \"Class\", etc.", "", " Return 0 when the two name types are equivalent, as in", " strncmp, but skipping ' '.", "*/", "SWIGRUNTIME int", "SWIG_TypeNameComp(const char *f1, const char *l1,", "\t\t const char *f2, const char *l2) {", " for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {", " while ((*f1 == ' ') && (f1 != l1)) ++f1;", " while ((*f2 == ' ') && (f2 != l2)) ++f2;", " if (*f1 != *f2) return (int)(*f1 - *f2);", " }", " return (l1 - f1) - (l2 - f2);", "}", "", "/*", " Check type equivalence in a name list like ||...", " Return 0 if not equal, 1 if equal", "*/", "SWIGRUNTIME int", "SWIG_TypeEquiv(const char *nb, const char *tb) {", " int equiv = 0;", " const char* te = tb + strlen(tb);", " const char* ne = nb;", " while (!equiv && *ne) {", " for (nb = ne; *ne; ++ne) {", " if (*ne == '|') break;", " }", " equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;", " if (*ne) ++ne;", " }", " return equiv;", "}", "", "/*", " Check type equivalence in a name list like ||...", " Return 0 if equal, -1 if nb < tb, 1 if nb > tb", "*/", "SWIGRUNTIME int", "SWIG_TypeCompare(const char *nb, const char *tb) {", " int equiv = 0;", " const char* te = tb + strlen(tb);", " const char* ne = nb;", " while (!equiv && *ne) {", " for (nb = ne; *ne; ++ne) {", " if (*ne == '|') break;", " }", " equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 
1 : 0;", " if (*ne) ++ne;", " }", " return equiv;", "}", "", "", "/* think of this as a c++ template<> or a scheme macro */", "#define SWIG_TypeCheck_Template(comparison, ty) \\", " if (ty) { \\", " swig_cast_info *iter = ty->cast; \\", " while (iter) { \\", " if (comparison) { \\", " if (iter == ty->cast) return iter; \\", " /* Move iter to the top of the linked list */ \\", " iter->prev->next = iter->next; \\", " if (iter->next) \\", " iter->next->prev = iter->prev; \\", " iter->next = ty->cast; \\", " iter->prev = 0; \\", " if (ty->cast) ty->cast->prev = iter; \\", " ty->cast = iter; \\", " return iter; \\", " } \\", " iter = iter->next; \\", " } \\", " } \\", " return 0", "", "/*", " Check the typename", "*/", "SWIGRUNTIME swig_cast_info *", "SWIG_TypeCheck(const char *c, swig_type_info *ty) {", " SWIG_TypeCheck_Template(strcmp(iter->type->name, c) == 0, ty);", "}", "", "/* Same as previous function, except strcmp is replaced with a pointer comparison */", "SWIGRUNTIME swig_cast_info *", "SWIG_TypeCheckStruct(swig_type_info *from, swig_type_info *into) {", " SWIG_TypeCheck_Template(iter->type == from, into);", "}", "", "/*", " Cast a pointer up an inheritance hierarchy", "*/", "SWIGRUNTIMEINLINE void *", "SWIG_TypeCast(swig_cast_info *ty, void *ptr) {", " return ((!ty) || (!ty->converter)) ? ptr : (*ty->converter)(ptr);", "}", "", "/*", " Dynamic pointer casting. 
Down an inheritance hierarchy", "*/", "SWIGRUNTIME swig_type_info *", "SWIG_TypeDynamicCast(swig_type_info *ty, void **ptr) {", " swig_type_info *lastty = ty;", " if (!ty || !ty->dcast) return ty;", " while (ty && (ty->dcast)) {", " ty = (*ty->dcast)(ptr);", " if (ty) lastty = ty;", " }", " return lastty;", "}", "", "/*", " Return the name associated with this type", "*/", "SWIGRUNTIMEINLINE const char *", "SWIG_TypeName(const swig_type_info *ty) {", " return ty->name;", "}", "", "/*", " Return the pretty name associated with this type,", " that is an unmangled type name in a form presentable to the user.", "*/", "SWIGRUNTIME const char *", "SWIG_TypePrettyName(const swig_type_info *type) {", " /* The \"str\" field contains the equivalent pretty names of the", " type, separated by vertical-bar characters. We choose", " to print the last name, as it is often (?) the most", " specific. */", " if (type->str != NULL) {", " const char *last_name = type->str;", " const char *s;", " for (s = type->str; *s; s++)", " if (*s == '|') last_name = s+1;", " return last_name;", " }", " else", " return type->name;", "}", "", "/*", " Set the clientdata field for a type", "*/", "SWIGRUNTIME void", "SWIG_TypeClientData(swig_type_info *ti, void *clientdata) {", " if (!ti->clientdata) {", " swig_cast_info *cast = ti->cast;", " /* if (ti->clientdata == clientdata) return; */", " ti->clientdata = clientdata;", "", " while (cast) {", " if (!cast->converter)", "\tSWIG_TypeClientData(cast->type, clientdata);", " cast = cast->next;", " }", " }", "}", "", "/*", " Search for a swig_type_info structure only by mangled name", " Search is a O(log #types)", "", " We start searching at module start, and finish searching when start == end.", " Note: if start == end at the beginning of the function, we go all the way around", " the circular list.", "*/", "SWIGRUNTIME swig_type_info *", "SWIG_MangledTypeQueryModule(swig_module_info *start,", " swig_module_info *end,", "\t\t const char *name) {", " 
swig_module_info *iter = start;", " do {", " if (iter->size) {", " register size_t l = 0;", " register size_t r = iter->size - 1;", " do {", "\t/* since l+r >= 0, we can (>> 1) instead (/ 2) */", "\tregister size_t i = (l + r) >> 1;", "\tconst char *iname = iter->types[i]->name;", "\tif (iname) {", "\t register int compare = strcmp(name, iname);", "\t if (compare == 0) {", "\t return iter->types[i];", "\t } else if (compare < 0) {", "\t if (i) {", "\t r = i - 1;", "\t } else {", "\t break;", "\t }", "\t } else if (compare > 0) {", "\t l = i + 1;", "\t }", "\t} else {", "\t break; /* should never happen */", "\t}", " } while (l <= r);", " }", " iter = iter->next;", " } while (iter != end);", " return 0;", "}", "", "/*", " Search for a swig_type_info structure for either a mangled name or a human readable name.", " It first searches the mangled names of the types, which is a O(log #types)", " If a type is not found it then searches the human readable names, which is O(#types).", "", " We start searching at module start, and finish searching when start == end.", " Note: if start == end at the beginning of the function, we go all the way around", " the circular list.", "*/", "SWIGRUNTIME swig_type_info *", "SWIG_TypeQueryModule(swig_module_info *start,", " swig_module_info *end,", "\t\t const char *name) {", " /* STEP 1: Search the name field using binary search */", " swig_type_info *ret = SWIG_MangledTypeQueryModule(start, end, name);", " if (ret) {", " return ret;", " } else {", " /* STEP 2: If the type hasn't been found, do a complete search", " of the str field (the human readable name) */", " swig_module_info *iter = start;", " do {", " register size_t i = 0;", " for (; i < iter->size; ++i) {", "\tif (iter->types[i]->str && (SWIG_TypeEquiv(iter->types[i]->str, name)))", "\t return iter->types[i];", " }", " iter = iter->next;", " } while (iter != end);", " }", "", " /* neither found a match */", " return 0;", "}", "", "", "/*", " Pack binary data into a string", 
"*/", "SWIGRUNTIME char *", "SWIG_PackData(char *c, void *ptr, size_t sz) {", " static const char hex[17] = \"0123456789abcdef\";", " register const unsigned char *u = (unsigned char *) ptr;", " register const unsigned char *eu = u + sz;", " for (; u != eu; ++u) {", " register unsigned char uu = *u;", " *(c++) = hex[(uu & 0xf0) >> 4];", " *(c++) = hex[uu & 0xf];", " }", " return c;", "}", "", "/*", " Unpack binary data from a string", "*/", "SWIGRUNTIME const char *", "SWIG_UnpackData(const char *c, void *ptr, size_t sz) {", " register unsigned char *u = (unsigned char *) ptr;", " register const unsigned char *eu = u + sz;", " for (; u != eu; ++u) {", " register char d = *(c++);", " register unsigned char uu = 0;", " if ((d >= '0') && (d <= '9'))", " uu = ((d - '0') << 4);", " else if ((d >= 'a') && (d <= 'f'))", " uu = ((d - ('a'-10)) << 4);", " else", " return (char *) 0;", " d = *(c++);", " if ((d >= '0') && (d <= '9'))", " uu |= (d - '0');", " else if ((d >= 'a') && (d <= 'f'))", " uu |= (d - ('a'-10));", " else", " return (char *) 0;", " *u = uu;", " }", " return c;", "}", "", "/*", " Pack 'void *' into a string buffer.", "*/", "SWIGRUNTIME char *", "SWIG_PackVoidPtr(char *buff, void *ptr, const char *name, size_t bsz) {", " char *r = buff;", " if ((2*sizeof(void *) + 2) > bsz) return 0;", " *(r++) = '_';", " r = SWIG_PackData(r,&ptr,sizeof(void *));", " if (strlen(name) + 1 > (bsz - (r - buff))) return 0;", " strcpy(r,name);", " return buff;", "}", "", "SWIGRUNTIME const char *", "SWIG_UnpackVoidPtr(const char *c, void **ptr, const char *name) {", " if (*c != '_') {", " if (strcmp(c,\"NULL\") == 0) {", " *ptr = (void *) 0;", " return name;", " } else {", " return 0;", " }", " }", " return SWIG_UnpackData(++c,ptr,sizeof(void *));", "}", "", "SWIGRUNTIME char *", "SWIG_PackDataName(char *buff, void *ptr, size_t sz, const char *name, size_t bsz) {", " char *r = buff;", " size_t lname = (name ? 
strlen(name) : 0);", " if ((2*sz + 2 + lname) > bsz) return 0;", " *(r++) = '_';", " r = SWIG_PackData(r,ptr,sz);", " if (lname) {", " strncpy(r,name,lname+1);", " } else {", " *r = 0;", " }", " return buff;", "}", "", "SWIGRUNTIME const char *", "SWIG_UnpackDataName(const char *c, void *ptr, size_t sz, const char *name) {", " if (*c != '_') {", " if (strcmp(c,\"NULL\") == 0) {", " memset(ptr,0,sz);", " return name;", " } else {", " return 0;", " }", " }", " return SWIG_UnpackData(++c,ptr,sz);", "}", "", "#ifdef __cplusplus", "}", "#endif", "", "/***********************************************************************", " * pyrun.swg", " *", " * This file contains the runtime support for Python modules", " * and includes code for managing global variables and pointer", " * type checking.", " *", " * Author : David Beazley (beazley@cs.uchicago.edu)", " ************************************************************************/", "", "/* Common SWIG API */", "#define SWIG_ConvertPtr(obj, pp, type, flags) SWIG_Python_ConvertPtr(obj, pp, type, flags)", "#define SWIG_NewPointerObj(p, type, flags) SWIG_Python_NewPointerObj(p, type, flags)", "#define SWIG_MustGetPtr(p, type, argnum, flags) SWIG_Python_MustGetPtr(p, type, argnum, flags)", "", "", "/* Python-specific SWIG API */", "#define SWIG_ConvertPacked(obj, ptr, sz, ty, flags) SWIG_Python_ConvertPacked(obj, ptr, sz, ty, flags)", "#define SWIG_NewPackedObj(ptr, sz, type) SWIG_Python_NewPackedObj(ptr, sz, type)", "", "/* Runtime API */", "#define SWIG_GetModule(clientdata) SWIG_Python_GetModule()", "#define SWIG_SetModule(clientdata, pointer) SWIG_Python_SetModule(pointer)", "", "/* -----------------------------------------------------------------------------", " * Pointer declarations", " * ----------------------------------------------------------------------------- */", "/*", " Use SWIG_NO_COBJECT_TYPES to force the use of strings to represent", " C/C++ pointers in the python side. 
Very useful for debugging, but", " not always safe.", "*/", "#if !defined(SWIG_NO_COBJECT_TYPES) && !defined(SWIG_COBJECT_TYPES)", "# define SWIG_COBJECT_TYPES", "#endif", "", "/* Flags for pointer conversion */", "#define SWIG_POINTER_EXCEPTION 0x1", "#define SWIG_POINTER_DISOWN 0x2", "", "", "/* Add PyOS_snprintf for old Pythons */", "#if PY_VERSION_HEX < 0x02020000", "#define PyOS_snprintf snprintf", "#endif", "", "#ifdef __cplusplus", "extern \"C\" {", "#endif", "", "/* -----------------------------------------------------------------------------", " * Create a new pointer string", " * ----------------------------------------------------------------------------- */", "#ifndef SWIG_BUFFER_SIZE", "#define SWIG_BUFFER_SIZE 1024", "#endif", "", "#if defined(SWIG_COBJECT_TYPES)", "#if !defined(SWIG_COBJECT_PYTHON)", "/* -----------------------------------------------------------------------------", " * Implements a simple Swig Object type, and use it instead of PyCObject", " * ----------------------------------------------------------------------------- */", "", "typedef struct {", " PyObject_HEAD", " void *ptr;", " const char *desc;", "} PySwigObject;", "", "/* Declarations for objects of type PySwigObject */", "", "SWIGRUNTIME int", "PySwigObject_print(PySwigObject *v, FILE *fp, int flags)", "{", " char result[SWIG_BUFFER_SIZE];", " flags = flags;", " if (SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result))) {", " fputs(\"\", fp);", " return 0;", " } else {", " return 1;", " }", "}", "", "SWIGRUNTIME PyObject *", "PySwigObject_repr(PySwigObject *v)", "{", " char result[SWIG_BUFFER_SIZE];", " return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?", " PyString_FromFormat(\"\", result) : 0;", "}", "", "SWIGRUNTIME PyObject *", "PySwigObject_str(PySwigObject *v)", "{", " char result[SWIG_BUFFER_SIZE];", " return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?", " PyString_FromString(result) : 0;", "}", "", "SWIGRUNTIME PyObject *", 
"PySwigObject_long(PySwigObject *v)", "{", " return PyLong_FromVoidPtr(v->ptr);", "}", "", "SWIGRUNTIME PyObject *", "PySwigObject_format(const char* fmt, PySwigObject *v)", "{", " PyObject *res = NULL;", " PyObject *args = PyTuple_New(1);", " if (args && (PyTuple_SetItem(args, 0, PySwigObject_long(v)) == 0)) {", " PyObject *ofmt = PyString_FromString(fmt);", " if (ofmt) {", " res = PyString_Format(ofmt,args);", " Py_DECREF(ofmt);", " }", " Py_DECREF(args);", " }", " return res;", "}", "", "SWIGRUNTIME PyObject *", "PySwigObject_oct(PySwigObject *v)", "{", " return PySwigObject_format(\"%o\",v);", "}", "", "SWIGRUNTIME PyObject *", "PySwigObject_hex(PySwigObject *v)", "{", " return PySwigObject_format(\"%x\",v);", "}", "", "SWIGRUNTIME int", "PySwigObject_compare(PySwigObject *v, PySwigObject *w)", "{", " int c = strcmp(v->desc, w->desc);", " if (c) {", " return (c > 0) ? 1 : -1;", " } else {", " void *i = v->ptr;", " void *j = w->ptr;", " return (i < j) ? -1 : ((i > j) ? 1 : 0);", " }", "}", "", "SWIGRUNTIME void", "PySwigObject_dealloc(PySwigObject *self)", "{", " PyObject_DEL(self);", "}", "", "SWIGRUNTIME PyTypeObject*", "PySwigObject_type(void) {", " static char pyswigobject_type__doc__[] =", " \"Swig object carries a C/C++ instance pointer\";", "", " static PyNumberMethods PySwigObject_as_number = {", " (binaryfunc)0, /*nb_add*/", " (binaryfunc)0, /*nb_subtract*/", " (binaryfunc)0, /*nb_multiply*/", " (binaryfunc)0, /*nb_divide*/", " (binaryfunc)0, /*nb_remainder*/", " (binaryfunc)0, /*nb_divmod*/", " (ternaryfunc)0,/*nb_power*/", " (unaryfunc)0, /*nb_negative*/", " (unaryfunc)0, /*nb_positive*/", " (unaryfunc)0, /*nb_absolute*/", " (inquiry)0, /*nb_nonzero*/", " 0,\t\t /*nb_invert*/", " 0,\t\t /*nb_lshift*/", " 0,\t\t /*nb_rshift*/", " 0,\t\t /*nb_and*/", " 0,\t\t /*nb_xor*/", " 0,\t\t /*nb_or*/", " (coercion)0, /*nb_coerce*/", " (unaryfunc)PySwigObject_long, /*nb_int*/", " (unaryfunc)PySwigObject_long, /*nb_long*/", " (unaryfunc)0, /*nb_float*/", " 
(unaryfunc)PySwigObject_oct, /*nb_oct*/", " (unaryfunc)PySwigObject_hex, /*nb_hex*/", "#if PY_VERSION_HEX >= 0x02000000", " 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_true_divide */", "#endif", " };", "", " static PyTypeObject pyswigobject_type", "#if !defined(__cplusplus)", " ;", " static int type_init = 0;", " if (!type_init) {", " PyTypeObject tmp", "#endif", " = {", " PyObject_HEAD_INIT(&PyType_Type)", " 0,\t\t\t\t\t/*ob_size*/", " \"PySwigObject\",\t\t\t/*tp_name*/", " sizeof(PySwigObject),\t\t/*tp_basicsize*/", " 0,\t\t\t\t\t/*tp_itemsize*/", " /* methods */", " (destructor)PySwigObject_dealloc,\t/*tp_dealloc*/", " (printfunc)PySwigObject_print,\t/*tp_print*/", " (getattrfunc)0,\t\t\t/*tp_getattr*/", " (setattrfunc)0,\t\t\t/*tp_setattr*/", " (cmpfunc)PySwigObject_compare,\t/*tp_compare*/", " (reprfunc)PySwigObject_repr,\t/*tp_repr*/", " &PySwigObject_as_number,\t /*tp_as_number*/", " 0,\t\t\t\t\t/*tp_as_sequence*/", " 0,\t\t\t\t\t/*tp_as_mapping*/", " (hashfunc)0,\t\t\t/*tp_hash*/", " (ternaryfunc)0,\t\t\t/*tp_call*/", " (reprfunc)PySwigObject_str,\t\t/*tp_str*/", " /* Space for future expansion */", " 0,0,0,0,", " pyswigobject_type__doc__, \t /* Documentation string */", "#if PY_VERSION_HEX >= 0x02000000", " 0, /* tp_traverse */", " 0, /* tp_clear */", "#endif", "#if PY_VERSION_HEX >= 0x02010000", " 0, /* tp_richcompare */", " 0, /* tp_weaklistoffset */", "#endif", "#if PY_VERSION_HEX >= 0x02020000", " 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */", "#endif", "#if PY_VERSION_HEX >= 0x02030000", " 0, /* tp_del */", "#endif", "#ifdef COUNT_ALLOCS", " 0,0,0,0 /* tp_alloc -> tp_next */", "#endif", " };", "#if !defined(__cplusplus)", " pyswigobject_type = tmp;", " type_init = 1;", " }", "#endif", " return &pyswigobject_type;", "}", "", "SWIGRUNTIME PyObject *", "PySwigObject_FromVoidPtrAndDesc(void *ptr, const char *desc)", "{", " PySwigObject *self = PyObject_NEW(PySwigObject, PySwigObject_type());", " if (self) {", " 
self->ptr = ptr;", " self->desc = desc;", " }", " return (PyObject *)self;", "}", "", "SWIGRUNTIMEINLINE void *", "PySwigObject_AsVoidPtr(PyObject *self)", "{", " return ((PySwigObject *)self)->ptr;", "}", "", "SWIGRUNTIMEINLINE const char *", "PySwigObject_GetDesc(PyObject *self)", "{", " return ((PySwigObject *)self)->desc;", "}", "", "SWIGRUNTIMEINLINE int", "PySwigObject_Check(PyObject *op) {", " return ((op)->ob_type == PySwigObject_type())", " || (strcmp((op)->ob_type->tp_name,\"PySwigObject\") == 0);", "}", "", "/* -----------------------------------------------------------------------------", " * Implements a simple Swig Packed type, and use it instead of string", " * ----------------------------------------------------------------------------- */", "", "typedef struct {", " PyObject_HEAD", " void *pack;", " const char *desc;", " size_t size;", "} PySwigPacked;", "", "SWIGRUNTIME int", "PySwigPacked_print(PySwigPacked *v, FILE *fp, int flags)", "{", " char result[SWIG_BUFFER_SIZE];", " flags = flags;", " fputs(\"pack, v->size, 0, sizeof(result))) {", " fputs(\"at \", fp);", " fputs(result, fp);", " }", " fputs(v->desc,fp);", " fputs(\">\", fp);", " return 0;", "}", "", "SWIGRUNTIME PyObject *", "PySwigPacked_repr(PySwigPacked *v)", "{", " char result[SWIG_BUFFER_SIZE];", " if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {", " return PyString_FromFormat(\"\", result, v->desc);", " } else {", " return PyString_FromFormat(\"\", v->desc);", " }", "}", "", "SWIGRUNTIME PyObject *", "PySwigPacked_str(PySwigPacked *v)", "{", " char result[SWIG_BUFFER_SIZE];", " if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))){", " return PyString_FromFormat(\"%s%s\", result, v->desc);", " } else {", " return PyString_FromFormat(\"%s\", v->desc);", " }", "}", "", "SWIGRUNTIME int", "PySwigPacked_compare(PySwigPacked *v, PySwigPacked *w)", "{", " int c = strcmp(v->desc, w->desc);", " if (c) {", " return (c > 0) ? 
1 : -1;", " } else {", " size_t i = v->size;", " size_t j = w->size;", " int s = (i < j) ? -1 : ((i > j) ? 1 : 0);", " return s ? s : strncmp((char *)v->pack, (char *)w->pack, 2*v->size);", " }", "}", "", "SWIGRUNTIME void", "PySwigPacked_dealloc(PySwigPacked *self)", "{", " free(self->pack);", " PyObject_DEL(self);", "}", "", "SWIGRUNTIME PyTypeObject*", "PySwigPacked_type(void) {", " static char pyswigpacked_type__doc__[] =", " \"Swig object carries a C/C++ instance pointer\";", " static PyTypeObject pyswigpacked_type", "#if !defined(__cplusplus)", " ;", " static int type_init = 0;", " if (!type_init) {", " PyTypeObject tmp", "#endif", " = {", " PyObject_HEAD_INIT(&PyType_Type)", " 0,\t\t\t\t\t/*ob_size*/", " \"PySwigPacked\",\t\t\t/*tp_name*/", " sizeof(PySwigPacked),\t\t/*tp_basicsize*/", " 0,\t\t\t\t\t/*tp_itemsize*/", " /* methods */", " (destructor)PySwigPacked_dealloc,\t/*tp_dealloc*/", " (printfunc)PySwigPacked_print,\t/*tp_print*/", " (getattrfunc)0,\t\t\t/*tp_getattr*/", " (setattrfunc)0,\t\t\t/*tp_setattr*/", " (cmpfunc)PySwigPacked_compare,\t/*tp_compare*/", " (reprfunc)PySwigPacked_repr,\t/*tp_repr*/", " 0,\t /*tp_as_number*/", " 0,\t\t\t\t\t/*tp_as_sequence*/", " 0,\t\t\t\t\t/*tp_as_mapping*/", " (hashfunc)0,\t\t\t/*tp_hash*/", " (ternaryfunc)0,\t\t\t/*tp_call*/", " (reprfunc)PySwigPacked_str,\t\t/*tp_str*/", " /* Space for future expansion */", " 0,0,0,0,", " pyswigpacked_type__doc__, \t /* Documentation string */", "#if PY_VERSION_HEX >= 0x02000000", " 0, /* tp_traverse */", " 0, /* tp_clear */", "#endif", "#if PY_VERSION_HEX >= 0x02010000", " 0, /* tp_richcompare */", " 0, /* tp_weaklistoffset */", "#endif", "#if PY_VERSION_HEX >= 0x02020000", " 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */", "#endif", "#if PY_VERSION_HEX >= 0x02030000", " 0, /* tp_del */", "#endif", "#ifdef COUNT_ALLOCS", " 0,0,0,0 /* tp_alloc -> tp_next */", "#endif", " };", "#if !defined(__cplusplus)", " pyswigpacked_type = tmp;", " type_init = 1;", " 
}", "#endif", " return &pyswigpacked_type;", "}", "", "SWIGRUNTIME PyObject *", "PySwigPacked_FromDataAndDesc(void *ptr, size_t size, const char *desc)", "{", " PySwigPacked *self = PyObject_NEW(PySwigPacked, PySwigPacked_type());", " if (self == NULL) {", " return NULL;", " } else {", " void *pack = malloc(size);", " if (pack) {", " memcpy(pack, ptr, size);", " self->pack = pack;", " self->desc = desc;", " self->size = size;", " return (PyObject *) self;", " }", " return NULL;", " }", "}", "", "SWIGRUNTIMEINLINE const char *", "PySwigPacked_UnpackData(PyObject *obj, void *ptr, size_t size)", "{", " PySwigPacked *self = (PySwigPacked *)obj;", " if (self->size != size) return 0;", " memcpy(ptr, self->pack, size);", " return self->desc;", "}", "", "SWIGRUNTIMEINLINE const char *", "PySwigPacked_GetDesc(PyObject *self)", "{", " return ((PySwigPacked *)self)->desc;", "}", "", "SWIGRUNTIMEINLINE int", "PySwigPacked_Check(PyObject *op) {", " return ((op)->ob_type == PySwigPacked_type())", " || (strcmp((op)->ob_type->tp_name,\"PySwigPacked\") == 0);", "}", "", "#else", "/* -----------------------------------------------------------------------------", " * Use the old Python PyCObject instead of PySwigObject", " * ----------------------------------------------------------------------------- */", "", "#define PySwigObject_GetDesc(obj)\t PyCObject_GetDesc(obj)", "#define PySwigObject_Check(obj)\t PyCObject_Check(obj)", "#define PySwigObject_AsVoidPtr(obj)\t PyCObject_AsVoidPtr(obj)", "#define PySwigObject_FromVoidPtrAndDesc(p, d) PyCObject_FromVoidPtrAndDesc(p, d, NULL)", "", "#endif", "", "#endif", "", "/* -----------------------------------------------------------------------------", " * errors manipulation", " * ----------------------------------------------------------------------------- */", "", "SWIGRUNTIME void", "SWIG_Python_TypeError(const char *type, PyObject *obj)", "{", " if (type) {", "#if defined(SWIG_COBJECT_TYPES)", " if (obj && PySwigObject_Check(obj)) {", " 
const char *otype = (const char *) PySwigObject_GetDesc(obj);", " if (otype) {", "\tPyErr_Format(PyExc_TypeError, \"a '%s' is expected, 'PySwigObject(%s)' is received\",", "\t\t type, otype);", "\treturn;", " }", " } else", "#endif", " {", " const char *otype = (obj ? obj->ob_type->tp_name : 0);", " if (otype) {", "\tPyObject *str = PyObject_Str(obj);", "\tconst char *cstr = str ? PyString_AsString(str) : 0;", "\tif (cstr) {", "\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s(%s)' is received\",", "\t\t type, otype, cstr);", "\t} else {", "\t PyErr_Format(PyExc_TypeError, \"a '%s' is expected, '%s' is received\",", "\t\t type, otype);", "\t}", "\tPy_XDECREF(str);", "\treturn;", " }", " }", " PyErr_Format(PyExc_TypeError, \"a '%s' is expected\", type);", " } else {", " PyErr_Format(PyExc_TypeError, \"unexpected type is received\");", " }", "}", "", "SWIGRUNTIMEINLINE void", "SWIG_Python_NullRef(const char *type)", "{", " if (type) {", " PyErr_Format(PyExc_TypeError, \"null reference of type '%s' was received\",type);", " } else {", " PyErr_Format(PyExc_TypeError, \"null reference was received\");", " }", "}", "", "SWIGRUNTIME int", "SWIG_Python_AddErrMesg(const char* mesg, int infront)", "{", " if (PyErr_Occurred()) {", " PyObject *type = 0;", " PyObject *value = 0;", " PyObject *traceback = 0;", " PyErr_Fetch(&type, &value, &traceback);", " if (value) {", " PyObject *old_str = PyObject_Str(value);", " Py_XINCREF(type);", " PyErr_Clear();", " if (infront) {", "\tPyErr_Format(type, \"%s %s\", mesg, PyString_AsString(old_str));", " } else {", "\tPyErr_Format(type, \"%s %s\", PyString_AsString(old_str), mesg);", " }", " Py_DECREF(old_str);", " }", " return 1;", " } else {", " return 0;", " }", "}", "", "SWIGRUNTIME int", "SWIG_Python_ArgFail(int argnum)", "{", " if (PyErr_Occurred()) {", " /* add information about failing argument */", " char mesg[256];", " PyOS_snprintf(mesg, sizeof(mesg), \"argument number %d:\", argnum);", " return 
SWIG_Python_AddErrMesg(mesg, 1);", " } else {", " return 0;", " }", "}", "", "", "/* -----------------------------------------------------------------------------", " * pointers/data manipulation", " * ----------------------------------------------------------------------------- */", "", "/* Convert a pointer value */", "SWIGRUNTIME int", "SWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {", " swig_cast_info *tc;", " const char *c = 0;", " static PyObject *SWIG_this = 0;", " int newref = 0;", " PyObject *pyobj = 0;", " void *vptr;", "", " if (!obj) return 0;", " if (obj == Py_None) {", " *ptr = 0;", " return 0;", " }", "", "#ifdef SWIG_COBJECT_TYPES", " if (!(PySwigObject_Check(obj))) {", " if (!SWIG_this)", " SWIG_this = PyString_FromString(\"this\");", " pyobj = obj;", " obj = PyObject_GetAttr(obj,SWIG_this);", " newref = 1;", " if (!obj) goto type_error;", " if (!PySwigObject_Check(obj)) {", " Py_DECREF(obj);", " goto type_error;", " }", " }", " vptr = PySwigObject_AsVoidPtr(obj);", " c = (const char *) PySwigObject_GetDesc(obj);", " if (newref) { Py_DECREF(obj); }", " goto type_check;", "#else", " if (!(PyString_Check(obj))) {", " if (!SWIG_this)", " SWIG_this = PyString_FromString(\"this\");", " pyobj = obj;", " obj = PyObject_GetAttr(obj,SWIG_this);", " newref = 1;", " if (!obj) goto type_error;", " if (!PyString_Check(obj)) {", " Py_DECREF(obj);", " goto type_error;", " }", " }", " c = PyString_AS_STRING(obj);", " /* Pointer values must start with leading underscore */", " c = SWIG_UnpackVoidPtr(c, &vptr, ty->name);", " if (newref) { Py_DECREF(obj); }", " if (!c) goto type_error;", "#endif", "", "type_check:", " if (ty) {", " tc = SWIG_TypeCheck(c,ty);", " if (!tc) goto type_error;", " *ptr = SWIG_TypeCast(tc,vptr);", " } else {", " *ptr = vptr;", " }", " if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {", " PyObject_SetAttrString(pyobj,(char*)\"thisown\",Py_False);", " }", " return 0;", "", "type_error:", " PyErr_Clear();", " 
if (pyobj && !obj) {", " obj = pyobj;", " if (PyCFunction_Check(obj)) {", " /* here we get the method pointer for callbacks */", " char *doc = (((PyCFunctionObject *)obj) -> m_ml -> ml_doc);", " c = doc ? strstr(doc, \"swig_ptr: \") : 0;", " if (c) {", "\tc = ty ? SWIG_UnpackVoidPtr(c + 10, &vptr, ty->name) : 0;", "\tif (!c) goto type_error;", "\tgoto type_check;", " }", " }", " }", " if (flags & SWIG_POINTER_EXCEPTION) {", " if (ty) {", " SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);", " } else {", " SWIG_Python_TypeError(\"C/C++ pointer\", obj);", " }", " }", " return -1;", "}", "", "/* Convert a pointer value, signal an exception on a type mismatch */", "SWIGRUNTIME void *", "SWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {", " void *result;", " if (SWIG_Python_ConvertPtr(obj, &result, ty, flags) == -1) {", " PyErr_Clear();", " if (flags & SWIG_POINTER_EXCEPTION) {", " SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);", " SWIG_Python_ArgFail(argnum);", " }", " }", " return result;", "}", "", "/* Convert a packed value value */", "SWIGRUNTIME int", "SWIG_Python_ConvertPacked(PyObject *obj, void *ptr, size_t sz, swig_type_info *ty, int flags) {", " swig_cast_info *tc;", " const char *c = 0;", "", "#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)", " c = PySwigPacked_UnpackData(obj, ptr, sz);", "#else", " if ((!obj) || (!PyString_Check(obj))) goto type_error;", " c = PyString_AS_STRING(obj);", " /* Pointer values must start with leading underscore */", " c = SWIG_UnpackDataName(c, ptr, sz, ty->name);", "#endif", " if (!c) goto type_error;", " if (ty) {", " tc = SWIG_TypeCheck(c,ty);", " if (!tc) goto type_error;", " }", " return 0;", "", "type_error:", " PyErr_Clear();", " if (flags & SWIG_POINTER_EXCEPTION) {", " if (ty) {", " SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);", " } else {", " SWIG_Python_TypeError(\"C/C++ packed data\", obj);", " }", " }", " return -1;", "}", "", "/* Create a new array 
object */", "SWIGRUNTIME PyObject *", "SWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {", " PyObject *robj = 0;", " if (!type) {", " if (!PyErr_Occurred()) {", " PyErr_Format(PyExc_TypeError, \"Swig: null type passed to NewPointerObj\");", " }", " return robj;", " }", " if (!ptr) {", " Py_INCREF(Py_None);", " return Py_None;", " }", "#ifdef SWIG_COBJECT_TYPES", " robj = PySwigObject_FromVoidPtrAndDesc((void *) ptr, (char *)type->name);", "#else", " {", " char result[SWIG_BUFFER_SIZE];", " robj = SWIG_PackVoidPtr(result, ptr, type->name, sizeof(result)) ?", " PyString_FromString(result) : 0;", " }", "#endif", " if (!robj || (robj == Py_None)) return robj;", " if (type->clientdata) {", " PyObject *inst;", " PyObject *args = Py_BuildValue((char*)\"(O)\", robj);", " Py_DECREF(robj);", " inst = PyObject_CallObject((PyObject *) type->clientdata, args);", " Py_DECREF(args);", " if (inst) {", " if (own) {", " PyObject_SetAttrString(inst,(char*)\"thisown\",Py_True);", " }", " robj = inst;", " }", " }", " return robj;", "}", "", "SWIGRUNTIME PyObject *", "SWIG_Python_NewPackedObj(void *ptr, size_t sz, swig_type_info *type) {", " PyObject *robj = 0;", " if (!ptr) {", " Py_INCREF(Py_None);", " return Py_None;", " }", "#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)", " robj = PySwigPacked_FromDataAndDesc((void *) ptr, sz, (char *)type->name);", "#else", " {", " char result[SWIG_BUFFER_SIZE];", " robj = SWIG_PackDataName(result, ptr, sz, type->name, sizeof(result)) ?", " PyString_FromString(result) : 0;", " }", "#endif", " return robj;", "}", "", "/* -----------------------------------------------------------------------------*", " * Get type list", " * -----------------------------------------------------------------------------*/", "", "#ifdef SWIG_LINK_RUNTIME", "void *SWIG_ReturnGlobalTypeList(void *);", "#endif", "", "SWIGRUNTIME swig_module_info *", "SWIG_Python_GetModule(void) {", " static void *type_pointer = (void *)0;", " /* first 
check if module already created */", " if (!type_pointer) {", "#ifdef SWIG_LINK_RUNTIME", " type_pointer = SWIG_ReturnGlobalTypeList((void *)0);", "#else", " type_pointer = PyCObject_Import((char*)\"swig_runtime_data\" SWIG_RUNTIME_VERSION,", "\t\t\t\t (char*)\"type_pointer\" SWIG_TYPE_TABLE_NAME);", " if (PyErr_Occurred()) {", " PyErr_Clear();", " type_pointer = (void *)0;", " }", " }", "#endif", " return (swig_module_info *) type_pointer;", "}", "", "SWIGRUNTIME void", "SWIG_Python_SetModule(swig_module_info *swig_module) {", " static PyMethodDef swig_empty_runtime_method_table[] = { {NULL, NULL, 0, NULL} };/* Sentinel */", "", " PyObject *module = Py_InitModule((char*)\"swig_runtime_data\" SWIG_RUNTIME_VERSION,", "\t\t\t\t swig_empty_runtime_method_table);", " PyObject *pointer = PyCObject_FromVoidPtr((void *) swig_module, NULL);", " if (pointer && module) {", " PyModule_AddObject(module, (char*)\"type_pointer\" SWIG_TYPE_TABLE_NAME, pointer);", " }", "}", "", "#ifdef __cplusplus", "}", "#endif", "", "/* -----------------------------------------------------------------------------*", " Standard SWIG API for use inside user code.", "", " Don't include this file directly, run the command", " swig -python -external-runtime", " Also, read the Modules chapter of the SWIG Manual.", "", " * -----------------------------------------------------------------------------*/", "", "#ifdef SWIG_MODULE_CLIENTDATA_TYPE", "", "SWIGRUNTIMEINLINE swig_type_info *", "SWIG_TypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {", " swig_module_info *module = SWIG_GetModule(clientdata);", " return SWIG_TypeQueryModule(module, module, name);", "}", "", "SWIGRUNTIMEINLINE swig_type_info *", "SWIG_MangledTypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {", " swig_module_info *module = SWIG_GetModule(clientdata);", " return SWIG_MangledTypeQueryModule(module, module, name);", "}", "", "#else", "", "SWIGRUNTIMEINLINE swig_type_info *", 
"SWIG_TypeQuery(const char *name) {", " swig_module_info *module = SWIG_GetModule();", " return SWIG_TypeQueryModule(module, module, name);", "}", "", "SWIGRUNTIMEINLINE swig_type_info *", "SWIG_MangledTypeQuery(const char *name) {", " swig_module_info *module = SWIG_GetModule();", " return SWIG_MangledTypeQueryModule(module, module, name);", "}", "", "#endif", "", "" ], "deleted": [] } } ] }, { "hash": "195ab4a92b1d42a6926db3f9095ef774998270cc", "msg": "Modifications to support scipy.special for numarray.", "author": { "name": "jmiller", "email": "jmiller@localhost" }, "committer": { "name": "jmiller", "email": "jmiller@localhost" }, "author_date": "2005-05-09T19:12:53+00:00", "author_timezone": 0, "committer_date": "2005-05-09T19:12:53+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "4cd1f2cde08d338ef7825b1a610948c7c3e5dc22" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 38, "insertions": 41, "lines": 79, "files": 7, "dmm_unit_size": 1.0, "dmm_unit_complexity": 1.0, "dmm_unit_interfacing": 1.0, "modified_files": [ { "old_path": "scipy_base/_na_imports.py", "new_path": "scipy_base/_na_imports.py", "filename": "_na_imports.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -83,8 +83,6 @@ def restore_numeric():\n \n ArrayType = arraytype\n \n-NUMERIX_HEADER = \"numarray/arrayobject.h\"\n-\n class UfuncType(object):\n \"\"\"numarray ufuncs work differently than Numeric ufuncs and\n have no single UfuncType... 
TBD\"\"\"\n@@ -103,9 +101,16 @@ def zeros(shape, typecode='l', savespace=0):\n z = _na.zeros(shape=shape, type=typecode)\n return z\n \n+def asscalar(a):\n+ \"\"\"Returns Python scalar value corresponding to 'a' for rank-0 arrays\n+ or the unaltered array for non-rank-0.\"\"\"\n+ return a[()]\n+\n # _Error.setMode(dividebyzero=\"ignore\", invalid=\"ignore\")\n Error.setMode(all=\"ignore\")\n \n+NX_VERSION = 'numarray %s' % _na.__version__\n+\n # Must appear after all public definititions\n __all__ = []\n for k in globals().keys():\n@@ -114,4 +119,3 @@ def zeros(shape, typecode='l', savespace=0):\n __all__.append(\"_insert\")\n __all__.append(\"_unique\")\n \n-\n", "added_lines": 7, "deleted_lines": 3, "source_code": "\"\"\"Imports from numarray for numerix, the numarray/Numeric interchangeability\nmodule. These array functions are used when numarray is chosen.\n\"\"\"\nfrom ppimport import ppimport, ppimport_attr\nimport numarray as _na\nfrom numarray.numeric import *\nimport numarray.ufunc as fastumath\n\nimport _compiled_base\nfrom _compiled_base import arraymap, _unique, _insert\n\ntry:\n from numarray.ieeespecial import isinf, isnan, isfinite\nexcept ImportError,msg:\n isinf = isnan = isfinite = None\n print msg\n\nfrom numarray.ieeespecial import \\\n plus_inf as PINF, \\\n minus_inf as NINF, \\\n inf, \\\n inf as infty, \\\n inf as Infinity, \\\n nan, \\\n nan as NAN, \\\n nan as Nan\n\ntry:\n from numarray.ieeespecial import \\\n plus_zero as PZERO, \\\n minus_zero as NZERO\nexcept ImportError,msg:\n print msg\n\nimport numarray.linear_algebra as LinearAlgebra\nimport numarray.linear_algebra.mlab as MLab\nimport numarray.random_array as RandomArray\nfrom numarray.fft import fft\ntry:\n from numarray.matrix import Matrix\nexcept ImportError,msg:\n Matrix = None\n print msg\nfrom numarray.linear_algebra import inverse, eigenvectors\nfrom numarray.convolve import convolve, cross_correlate\nfrom numarray.arrayprint import array2string\n\n# LinearAlgebra = 
ppimport(\"numarray.linear_algebra\")\n# MLab = ppimport(\"numarray.mlab\")\n# inverse = ppimport_from(\"numarray.linear_algebra.inverse\")\n# eigenvectors = ppimport_from(\"numarray.linear_algebra.eigenvectors\")\n# convolve = ppimport_from(\"numarray.convolve.convolve\")\n# fft = ppimport_from(\"numarray.fft.fft\")\n# Matrix = ppimport_from(\"numarray.matrix.Matrix\")\n# RandomArray = ppimport(\"numarray.random_array\")\n\nclass _TypeNamespace:\n \"\"\"Numeric compatible type aliases for use with extension functions.\"\"\"\n Int8 = typecode[Int8]\n UInt8 = typecode[UInt8]\n Int16 = typecode[Int16]\n UInt16 = typecode[UInt16]\n Int32 = typecode[Int32]\n UInt32 = typecode[UInt32] \n Float32 = typecode[Float32]\n Float64 = typecode[Float64]\n Complex32 = typecode[Complex32]\n Complex64 = typecode[Complex64]\n\nnx = _TypeNamespace()\n\ndef alter_numeric():\n pass\n\ndef restore_numeric():\n pass\n\nconj = conjugate\n\nUnsignedInt8 = UInt8\nUnsignedInt16 = UInt16\nUnsignedInt32 = UInt32\n\nArrayType = arraytype\n\nclass UfuncType(object):\n \"\"\"numarray ufuncs work differently than Numeric ufuncs and\n have no single UfuncType... 
TBD\"\"\"\n pass\n\n\ndef zeros(shape, typecode='l', savespace=0):\n \"\"\"scipy version of numarray.zeros() which supports creation of object\n arrays as well as numerical arrays.\n \"\"\"\n if typecode == 'O':\n import numarray.objects as obj\n z = obj.ObjectArray(shape=shape)\n z[:] = 0\n else:\n z = _na.zeros(shape=shape, type=typecode)\n return z\n\ndef asscalar(a):\n \"\"\"Returns Python scalar value corresponding to 'a' for rank-0 arrays\n or the unaltered array for non-rank-0.\"\"\"\n return a[()]\n\n# _Error.setMode(dividebyzero=\"ignore\", invalid=\"ignore\")\nError.setMode(all=\"ignore\")\n\nNX_VERSION = 'numarray %s' % _na.__version__\n\n# Must appear after all public definititions\n__all__ = []\nfor k in globals().keys():\n if k[0] != \"_\":\n __all__.append(k)\n__all__.append(\"_insert\")\n__all__.append(\"_unique\")\n\n", "source_code_before": "\"\"\"Imports from numarray for numerix, the numarray/Numeric interchangeability\nmodule. These array functions are used when numarray is chosen.\n\"\"\"\nfrom ppimport import ppimport, ppimport_attr\nimport numarray as _na\nfrom numarray.numeric import *\nimport numarray.ufunc as fastumath\n\nimport _compiled_base\nfrom _compiled_base import arraymap, _unique, _insert\n\ntry:\n from numarray.ieeespecial import isinf, isnan, isfinite\nexcept ImportError,msg:\n isinf = isnan = isfinite = None\n print msg\n\nfrom numarray.ieeespecial import \\\n plus_inf as PINF, \\\n minus_inf as NINF, \\\n inf, \\\n inf as infty, \\\n inf as Infinity, \\\n nan, \\\n nan as NAN, \\\n nan as Nan\n\ntry:\n from numarray.ieeespecial import \\\n plus_zero as PZERO, \\\n minus_zero as NZERO\nexcept ImportError,msg:\n print msg\n\nimport numarray.linear_algebra as LinearAlgebra\nimport numarray.linear_algebra.mlab as MLab\nimport numarray.random_array as RandomArray\nfrom numarray.fft import fft\ntry:\n from numarray.matrix import Matrix\nexcept ImportError,msg:\n Matrix = None\n print msg\nfrom numarray.linear_algebra import 
inverse, eigenvectors\nfrom numarray.convolve import convolve, cross_correlate\nfrom numarray.arrayprint import array2string\n\n# LinearAlgebra = ppimport(\"numarray.linear_algebra\")\n# MLab = ppimport(\"numarray.mlab\")\n# inverse = ppimport_from(\"numarray.linear_algebra.inverse\")\n# eigenvectors = ppimport_from(\"numarray.linear_algebra.eigenvectors\")\n# convolve = ppimport_from(\"numarray.convolve.convolve\")\n# fft = ppimport_from(\"numarray.fft.fft\")\n# Matrix = ppimport_from(\"numarray.matrix.Matrix\")\n# RandomArray = ppimport(\"numarray.random_array\")\n\nclass _TypeNamespace:\n \"\"\"Numeric compatible type aliases for use with extension functions.\"\"\"\n Int8 = typecode[Int8]\n UInt8 = typecode[UInt8]\n Int16 = typecode[Int16]\n UInt16 = typecode[UInt16]\n Int32 = typecode[Int32]\n UInt32 = typecode[UInt32] \n Float32 = typecode[Float32]\n Float64 = typecode[Float64]\n Complex32 = typecode[Complex32]\n Complex64 = typecode[Complex64]\n\nnx = _TypeNamespace()\n\ndef alter_numeric():\n pass\n\ndef restore_numeric():\n pass\n\nconj = conjugate\n\nUnsignedInt8 = UInt8\nUnsignedInt16 = UInt16\nUnsignedInt32 = UInt32\n\nArrayType = arraytype\n\nNUMERIX_HEADER = \"numarray/arrayobject.h\"\n\nclass UfuncType(object):\n \"\"\"numarray ufuncs work differently than Numeric ufuncs and\n have no single UfuncType... 
TBD\"\"\"\n pass\n\n\ndef zeros(shape, typecode='l', savespace=0):\n \"\"\"scipy version of numarray.zeros() which supports creation of object\n arrays as well as numerical arrays.\n \"\"\"\n if typecode == 'O':\n import numarray.objects as obj\n z = obj.ObjectArray(shape=shape)\n z[:] = 0\n else:\n z = _na.zeros(shape=shape, type=typecode)\n return z\n\n# _Error.setMode(dividebyzero=\"ignore\", invalid=\"ignore\")\nError.setMode(all=\"ignore\")\n\n# Must appear after all public definititions\n__all__ = []\nfor k in globals().keys():\n if k[0] != \"_\":\n __all__.append(k)\n__all__.append(\"_insert\")\n__all__.append(\"_unique\")\n\n\n", "methods": [ { "name": "alter_numeric", "long_name": "alter_numeric( )", "filename": "_na_imports.py", "nloc": 2, "complexity": 1, "token_count": 5, "parameters": [], "start_line": 72, "end_line": 73, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 0 }, { "name": "restore_numeric", "long_name": "restore_numeric( )", "filename": "_na_imports.py", "nloc": 2, "complexity": 1, "token_count": 5, "parameters": [], "start_line": 75, "end_line": 76, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 0 }, { "name": "zeros", "long_name": "zeros( shape , typecode = 'l' , savespace = 0 )", "filename": "_na_imports.py", "nloc": 8, "complexity": 2, "token_count": 59, "parameters": [ "shape", "typecode", "savespace" ], "start_line": 92, "end_line": 102, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 }, { "name": "asscalar", "long_name": "asscalar( a )", "filename": "_na_imports.py", "nloc": 2, "complexity": 1, "token_count": 12, "parameters": [ "a" ], "start_line": 104, "end_line": 107, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 } ], "methods_before": [ { "name": "alter_numeric", "long_name": "alter_numeric( )", "filename": "_na_imports.py", "nloc": 2, "complexity": 1, "token_count": 5, "parameters": 
[], "start_line": 72, "end_line": 73, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 0 }, { "name": "restore_numeric", "long_name": "restore_numeric( )", "filename": "_na_imports.py", "nloc": 2, "complexity": 1, "token_count": 5, "parameters": [], "start_line": 75, "end_line": 76, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 0 }, { "name": "zeros", "long_name": "zeros( shape , typecode = 'l' , savespace = 0 )", "filename": "_na_imports.py", "nloc": 8, "complexity": 2, "token_count": 59, "parameters": [ "shape", "typecode", "savespace" ], "start_line": 94, "end_line": 104, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "asscalar", "long_name": "asscalar( a )", "filename": "_na_imports.py", "nloc": 2, "complexity": 1, "token_count": 12, "parameters": [ "a" ], "start_line": 104, "end_line": 107, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 } ], "nloc": 85, "complexity": 5, "token_count": 421, "diff_parsed": { "added": [ "def asscalar(a):", " \"\"\"Returns Python scalar value corresponding to 'a' for rank-0 arrays", " or the unaltered array for non-rank-0.\"\"\"", " return a[()]", "", "NX_VERSION = 'numarray %s' % _na.__version__", "" ], "deleted": [ "NUMERIX_HEADER = \"numarray/arrayobject.h\"", "", "" ] } }, { "old_path": "scipy_base/_nc_imports.py", "new_path": "scipy_base/_nc_imports.py", "filename": "_nc_imports.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -49,15 +49,12 @@ class _TypeNamespace:\n RandomArray = ppimport('RandomArray')\n MLab = ppimport('MLab')\n \n-NUMERIX_HEADER = \"Numeric/arrayobject.h\"\n-\n #\n # Force numerix to use scipy_base.fastumath instead of numerix.umath.\n #\n import sys as _sys\n _sys.modules['umath'] = fastumath\n \n-\n if Numeric.__version__ < '23.5':\n matrixmultiply=dot\n \n@@ -72,10 +69,20 @@ class _TypeNamespace:\n except 
ImportError:\n UfuncType = type(Numeric.sin)\n \n+NX_VERSION = 'Numeric %s' % Numeric.__version__\n+\n+\n+def asscalar(a):\n+ \"\"\"Returns Python scalar value corresponding to 'a' for rank-0 arrays\n+ or the unaltered array for non-rank-0.\"\"\"\n+ if len(a.shape) == 0:\n+ return a[0]\n+ else:\n+ return a\n+\n __all__ = []\n for k in globals().keys():\n if k[0] != \"_\":\n __all__.append(k)\n __all__.append(\"_insert\")\n __all__.append(\"_unique\")\n-\n", "added_lines": 11, "deleted_lines": 4, "source_code": "\"\"\"Imports from Numeric for numerix, the numarray/Numeric interchangeability\nmodule. These array functions are used when Numeric is chosen.\n\"\"\"\nimport Numeric\nfrom Numeric import *\n\nimport fastumath\nfrom fastumath import *\nfrom fastumath import PINF as inf\nfrom fastumath import alter_numeric, restore_numeric\n\nimport _compiled_base\nfrom _compiled_base import arraymap, _unique, _insert\n\nfrom ppimport import ppimport, ppimport_attr\n\nclass _TypeNamespace:\n \"\"\"Numeric compatible type aliases for use with extension functions.\"\"\"\n Int8 = Int8\n UInt8 = UInt8\n Int16 = Int16\n UInt16 = UInt16\n Int32 = Int32\n UInt32 = UInt32\n Float32 = Float32\n Float64 = Float64\n Complex32 = Complex32\n Complex64 = Complex64\n\nnx = _TypeNamespace()\n\n# inf is useful for testing infinities in results of array divisions\n# (which don't raise exceptions)\n\ninf = infty = Infinity = (array([1])/0.0)[0]\n\n# The following import statements are equivalent to\n#\n# from Matrix import Matrix as mat\n#\n# but avoids expensive LinearAlgebra import when\n# Matrix is not used.\n#\nLinearAlgebra = ppimport('LinearAlgebra')\ninverse = ppimport_attr(LinearAlgebra, 'inverse')\neigenvectors = ppimport_attr(LinearAlgebra, 'eigenvectors')\nMatrix = mat = ppimport_attr(ppimport('Matrix'), 'Matrix')\nfft = ppimport_attr(ppimport('FFT'), 'fft')\nRandomArray = ppimport('RandomArray')\nMLab = ppimport('MLab')\n\n#\n# Force numerix to use scipy_base.fastumath instead of 
numerix.umath.\n#\nimport sys as _sys\n_sys.modules['umath'] = fastumath\n\nif Numeric.__version__ < '23.5':\n matrixmultiply=dot\n\nInf = inf = fastumath.PINF\ntry:\n NAN = NaN = nan = fastumath.NAN\nexcept AttributeError:\n NaN = NAN = nan = fastumath.PINF/fastumath.PINF\n\ntry:\n from Numeric import UfuncType\nexcept ImportError:\n UfuncType = type(Numeric.sin)\n\nNX_VERSION = 'Numeric %s' % Numeric.__version__\n\n\ndef asscalar(a):\n \"\"\"Returns Python scalar value corresponding to 'a' for rank-0 arrays\n or the unaltered array for non-rank-0.\"\"\"\n if len(a.shape) == 0:\n return a[0]\n else:\n return a\n\n__all__ = []\nfor k in globals().keys():\n if k[0] != \"_\":\n __all__.append(k)\n__all__.append(\"_insert\")\n__all__.append(\"_unique\")\n", "source_code_before": "\"\"\"Imports from Numeric for numerix, the numarray/Numeric interchangeability\nmodule. These array functions are used when Numeric is chosen.\n\"\"\"\nimport Numeric\nfrom Numeric import *\n\nimport fastumath\nfrom fastumath import *\nfrom fastumath import PINF as inf\nfrom fastumath import alter_numeric, restore_numeric\n\nimport _compiled_base\nfrom _compiled_base import arraymap, _unique, _insert\n\nfrom ppimport import ppimport, ppimport_attr\n\nclass _TypeNamespace:\n \"\"\"Numeric compatible type aliases for use with extension functions.\"\"\"\n Int8 = Int8\n UInt8 = UInt8\n Int16 = Int16\n UInt16 = UInt16\n Int32 = Int32\n UInt32 = UInt32\n Float32 = Float32\n Float64 = Float64\n Complex32 = Complex32\n Complex64 = Complex64\n\nnx = _TypeNamespace()\n\n# inf is useful for testing infinities in results of array divisions\n# (which don't raise exceptions)\n\ninf = infty = Infinity = (array([1])/0.0)[0]\n\n# The following import statements are equivalent to\n#\n# from Matrix import Matrix as mat\n#\n# but avoids expensive LinearAlgebra import when\n# Matrix is not used.\n#\nLinearAlgebra = ppimport('LinearAlgebra')\ninverse = ppimport_attr(LinearAlgebra, 'inverse')\neigenvectors = 
ppimport_attr(LinearAlgebra, 'eigenvectors')\nMatrix = mat = ppimport_attr(ppimport('Matrix'), 'Matrix')\nfft = ppimport_attr(ppimport('FFT'), 'fft')\nRandomArray = ppimport('RandomArray')\nMLab = ppimport('MLab')\n\nNUMERIX_HEADER = \"Numeric/arrayobject.h\"\n\n#\n# Force numerix to use scipy_base.fastumath instead of numerix.umath.\n#\nimport sys as _sys\n_sys.modules['umath'] = fastumath\n\n\nif Numeric.__version__ < '23.5':\n matrixmultiply=dot\n\nInf = inf = fastumath.PINF\ntry:\n NAN = NaN = nan = fastumath.NAN\nexcept AttributeError:\n NaN = NAN = nan = fastumath.PINF/fastumath.PINF\n\ntry:\n from Numeric import UfuncType\nexcept ImportError:\n UfuncType = type(Numeric.sin)\n\n__all__ = []\nfor k in globals().keys():\n if k[0] != \"_\":\n __all__.append(k)\n__all__.append(\"_insert\")\n__all__.append(\"_unique\")\n\n", "methods": [ { "name": "asscalar", "long_name": "asscalar( a )", "filename": "_nc_imports.py", "nloc": 5, "complexity": 2, "token_count": 25, "parameters": [ "a" ], "start_line": 75, "end_line": 81, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 } ], "methods_before": [], "changed_methods": [ { "name": "asscalar", "long_name": "asscalar( a )", "filename": "_nc_imports.py", "nloc": 5, "complexity": 2, "token_count": 25, "parameters": [ "a" ], "start_line": 75, "end_line": 81, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 } ], "nloc": 58, "complexity": 2, "token_count": 306, "diff_parsed": { "added": [ "NX_VERSION = 'Numeric %s' % Numeric.__version__", "", "", "def asscalar(a):", " \"\"\"Returns Python scalar value corresponding to 'a' for rank-0 arrays", " or the unaltered array for non-rank-0.\"\"\"", " if len(a.shape) == 0:", " return a[0]", " else:", " return a", "" ], "deleted": [ "NUMERIX_HEADER = \"Numeric/arrayobject.h\"", "", "", "" ] } }, { "old_path": "scipy_base/function_base.py", "new_path": "scipy_base/function_base.py", "filename": "function_base.py", 
"extension": "py", "change_type": "MODIFY", "diff": "@@ -2,13 +2,14 @@\n import numerix as _nx\n from numerix import ravel, nonzero, array, choose, ones, zeros, \\\n sometrue, alltrue, reshape, alter_numeric, restore_numeric, arraymap, \\\n- pi, _insert, multiply, add, arctan2, maximum, minimum\n+ pi, _insert, multiply, add, arctan2, maximum, minimum, any, all\n from type_check import ScalarType, isscalar, asarray\n from shape_base import squeeze, atleast_1d\n \n-__all__ = ['round','any','all','logspace','linspace','fix','mod',\n- 'select','trim_zeros','amax','amin', 'alen', 'ptp','cumsum','take',\n- 'copy', 'prod','cumprod','diff','angle','unwrap','sort_complex',\n+__all__ = ['round','logspace','linspace','fix','mod',\n+ 'select','trim_zeros','amax','amin', 'alen',\n+ 'ptp','cumsum','take', 'copy',\n+ 'prod','cumprod','diff','angle','unwrap','sort_complex',\n 'disp','unique','extract','insert','nansum','nanmax','nanargmax',\n 'nanargmin','nanmin','sum','vectorize','asarray_chkfinite',\n 'alter_numeric', 'restore_numeric','isaltered']\n@@ -27,17 +28,6 @@ def asarray_chkfinite(x):\n raise ValueError, \"Array must not contain infs or nans.\"\n return x \n \n-def any(x):\n- \"\"\"Return true if any elements of x are true: sometrue(ravel(x))\n- \"\"\"\n- return sometrue(ravel(x))\n-\n-\n-def all(x):\n- \"\"\"Return true if all elements of x are true: alltrue(ravel(x))\n- \"\"\"\n- return alltrue(ravel(x))\n-\n # Need this to change array type for low precision values\n def sum(x,axis=0): # could change default axis here\n x = asarray(x)\n", "added_lines": 5, "deleted_lines": 15, "source_code": "import types\nimport numerix as _nx\nfrom numerix import ravel, nonzero, array, choose, ones, zeros, \\\n sometrue, alltrue, reshape, alter_numeric, restore_numeric, arraymap, \\\n pi, _insert, multiply, add, arctan2, maximum, minimum, any, all\nfrom type_check import ScalarType, isscalar, asarray\nfrom shape_base import squeeze, atleast_1d\n\n__all__ = 
['round','logspace','linspace','fix','mod',\n 'select','trim_zeros','amax','amin', 'alen',\n 'ptp','cumsum','take', 'copy',\n 'prod','cumprod','diff','angle','unwrap','sort_complex',\n 'disp','unique','extract','insert','nansum','nanmax','nanargmax',\n 'nanargmin','nanmin','sum','vectorize','asarray_chkfinite',\n 'alter_numeric', 'restore_numeric','isaltered']\n\ndef isaltered():\n val = str(type(_nx.array([1])))\n return 'scipy' in val\n\nround = _nx.around\n\ndef asarray_chkfinite(x):\n \"\"\"Like asarray except it checks to be sure no NaNs or Infs are present.\n \"\"\"\n x = asarray(x)\n if not all(_nx.isfinite(x)):\n raise ValueError, \"Array must not contain infs or nans.\"\n return x \n\n# Need this to change array type for low precision values\ndef sum(x,axis=0): # could change default axis here\n x = asarray(x)\n if x.typecode() in ['1','s','b','w']:\n x = x.astype('l')\n return _nx.sum(x,axis)\n \n\ndef logspace(start,stop,num=50,endpoint=1):\n \"\"\" Evenly spaced samples on a logarithmic scale.\n\n Return num evenly spaced samples from 10**start to 10**stop. If\n endpoint=1 then last sample is 10**stop.\n \"\"\"\n if num <= 0: return array([])\n if endpoint:\n step = (stop-start)/float((num-1))\n y = _nx.arange(0,num) * step + start\n else:\n step = (stop-start)/float(num)\n y = _nx.arange(0,num) * step + start\n return _nx.power(10.0,y)\n\ndef linspace(start,stop,num=50,endpoint=1,retstep=0):\n \"\"\" Evenly spaced samples.\n \n Return num evenly spaced samples from start to stop. If endpoint=1 then\n last sample is stop. 
If retstep is 1 then return the step value used.\n \"\"\"\n if num <= 0: return array([])\n if endpoint:\n step = (stop-start)/float((num-1))\n y = _nx.arange(0,num) * step + start \n else:\n step = (stop-start)/float(num)\n y = _nx.arange(0,num) * step + start\n if retstep:\n return y, step\n else:\n return y\n\ndef fix(x):\n \"\"\" Round x to nearest integer towards zero.\n \"\"\"\n x = asarray(x)\n y = _nx.floor(x)\n return _nx.where(x<0,y+1,y)\n\ndef mod(x,y):\n \"\"\" x - y*floor(x/y)\n \n For numeric arrays, x % y has the same sign as x while\n mod(x,y) has the same sign as y.\n \"\"\"\n return x - y*_nx.floor(x*1.0/y)\n\ndef select(condlist, choicelist, default=0):\n \"\"\" Returns an array comprised from different elements of choicelist\n depending on the list of conditions.\n\n condlist is a list of condition arrays containing ones or zeros\n \n choicelist is a list of choice matrices (of the \"same\" size as the\n arrays in condlist). The result array has the \"same\" size as the\n arrays in choicelist. If condlist is [c0,...,cN-1] then choicelist\n must be of length N. The elements of the choicelist can then be\n represented as [v0,...,vN-1]. The default choice if none of the\n conditions are met is given as the default argument. \n \n The conditions are tested in order and the first one statisfied is\n used to select the choice. 
In other words, the elements of the\n output array are found from the following tree (notice the order of\n the conditions matters):\n \n if c0: v0\n elif c1: v1\n elif c2: v2\n ...\n elif cN-1: vN-1\n else: default\n \n Note, that one of the condition arrays must be large enough to handle\n the largest array in the choice list.\n \"\"\"\n n = len(condlist)\n n2 = len(choicelist)\n if n2 != n:\n raise ValueError, \"List of cases, must be same length as the list of conditions.\"\n choicelist.insert(0,default) \n S = 0\n pfac = 1\n for k in range(1,n+1):\n S += k * pfac * asarray(condlist[k-1])\n if k < n:\n pfac *= (1-asarray(condlist[k-1]))\n # handle special case of a 1-element condition but\n # a multi-element choice\n if type(S) in ScalarType or max(asarray(S).shape)==1:\n pfac = asarray(1)\n for k in range(n2+1):\n pfac = pfac + asarray(choicelist[k]) \n S = S*ones(asarray(pfac).shape)\n return choose(S, tuple(choicelist))\n\ndef _asarray1d(arr):\n \"\"\"Ensure 1d array for one array.\n \"\"\"\n m = asarray(arr)\n if len(m.shape)==0:\n m = reshape(m,(1,))\n return m\n\ndef copy(a):\n \"\"\"Return an array copy of the object.\n \"\"\"\n return array(a,copy=1)\n\ndef take(a, indices, axis=0):\n \"\"\"Selects the elements in indices from array a along given axis.\n \"\"\"\n try:\n a = _nx.take(a,indices,axis)\n except ValueError: # a is scalar\n pass\n return a\n\ndef _no_axis_is_all(function, m, axis):\n if axis is None:\n m = ravel(m)\n axis = 0\n else:\n m = _asarray1d(m)\n if _nx.which[0] == \"numeric\":\n r = function(m, axis)\n else:\n import numarray as _na\n _na.Error.pushMode(overflow=\"raise\")\n try:\n r = function(m, axis)\n finally:\n _na.Error.popMode()\n return r\n \n# Basic operations\ndef amax(m,axis=-1): \n \"\"\"Returns the maximum of m along dimension axis. 
\n \"\"\"\n return _no_axis_is_all(maximum.reduce, m, axis)\n\ndef amin(m,axis=-1):\n \"\"\"Returns the minimum of m along dimension axis.\n \"\"\"\n return _no_axis_is_all(minimum.reduce, m, axis)\n\ndef alen(m):\n \"\"\"Returns the length of a Python object interpreted as an array\n \"\"\"\n return len(asarray(m))\n\n# Actually from Basis, but it fits in so naturally here...\n\ndef _amin_amax(m, axis):\n return amax(m,axis)-amin(m,axis)\n\ndef ptp(m,axis=-1):\n \"\"\"Returns the maximum - minimum along the the given dimension\n \"\"\"\n return _no_axis_is_all(_amin_amax, m, axis)\n\ndef cumsum(m,axis=-1):\n \"\"\"Returns the cumulative sum of the elements along the given axis\n \"\"\"\n return _no_axis_is_all(add.accumulate, m, axis)\n\ndef prod(m,axis=-1):\n \"\"\"Returns the product of the elements along the given axis\n \"\"\"\n return _no_axis_is_all(multiply.reduce, m, axis)\n\ndef cumprod(m,axis=-1):\n \"\"\"Returns the cumulative product of the elments along the given axis\n \"\"\"\n return _no_axis_is_all(multiply.accumulate, m, axis)\n\ndef diff(x, n=1,axis=-1):\n \"\"\"Calculates the nth order, discrete difference along given axis.\n \"\"\"\n if n==0:\n return x\n if n<0:\n raise ValueError,'Order must be non-negative but got ' + `n`\n x = _asarray1d(x)\n nd = len(x.shape)\n slice1 = [slice(None)]*nd\n slice2 = [slice(None)]*nd\n slice1[axis] = slice(1,None)\n slice2[axis] = slice(None,-1)\n if n > 1:\n return diff(x[slice1]-x[slice2], n-1, axis=axis)\n else:\n return x[slice1]-x[slice2]\n \ndef angle(z,deg=0):\n \"\"\"Return the angle of complex argument z.\"\"\"\n if deg:\n fact = 180/pi\n else:\n fact = 1.0\n z = asarray(z)\n if z.typecode() in ['D','F']:\n zimag = z.imag\n zreal = z.real\n else:\n zimag = 0\n zreal = z\n return arctan2(zimag,zreal) * fact\n\ndef unwrap(p,discont=pi,axis=-1):\n \"\"\"unwraps radian phase p by changing absolute jumps greater than\n discont to their 2*pi complement along the given axis.\n \"\"\"\n p = asarray(p)\n nd = 
len(p.shape)\n dd = diff(p,axis=axis)\n slice1 = [slice(None,None)]*nd # full slices\n slice1[axis] = slice(1,None)\n ddmod = mod(dd+pi,2*pi)-pi\n _nx.putmask(ddmod,(ddmod==-pi) & (dd > 0),pi)\n ph_correct = ddmod - dd;\n _nx.putmask(ph_correct,abs(dd)>> import scipy\n >>> a = array((0,0,0,1,2,3,2,1,0))\n >>> scipy.trim_zeros(a)\n array([1, 2, 3, 2, 1])\n \"\"\"\n first = 0\n if 'f' in trim or 'F' in trim:\n for i in filt:\n if i != 0.: break\n else: first = first + 1\n last = len(filt)\n if 'b' in trim or 'B' in trim:\n for i in filt[::-1]:\n if i != 0.: break\n else: last = last - 1\n return filt[first:last]\n\ndef unique(inseq):\n \"\"\"Returns unique items in 1-dimensional sequence.\n \"\"\"\n set = {}\n for item in inseq:\n set[item] = None\n return asarray(set.keys())\n\ndef where(condition,x=None,y=None):\n \"\"\"If x and y are both None, then return the (1-d equivalent) indices\n where condition is true. Otherwise, return an array shaped like\n condition with elements of x and y in the places where condition is\n true or false respectively.\n \"\"\"\n if (x is None) and (y is None):\n # Needs work for multidimensional arrays\n return nonzero(ravel(condition))\n else:\n return choose(not_equal(condition, 0), (y,x))\n \ndef extract(condition, arr):\n \"\"\"Elements of ravel(condition) where ravel(condition) is true (1-d)\n\n Equivalent of compress(ravel(condition), ravel(arr))\n \"\"\"\n return _nx.take(ravel(arr), nonzero(ravel(condition)))\n\ndef insert(arr, mask, vals):\n \"\"\"Similar to putmask arr[mask] = vals but 1d array vals has the\n same number of elements as the non-zero values of mask. 
Inverse of extract.\n \"\"\"\n return _nx._insert(arr, mask, vals)\n\ndef nansum(x,axis=-1):\n \"\"\"Sum the array over the given axis treating nans as missing values.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),0)\n return _nx.sum(x,axis)\n\ndef nanmin(x,axis=-1):\n \"\"\"Find the minimium over the given axis ignoring nans.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),inf)\n return amin(x,axis)\n\ndef nanargmin(x,axis=-1):\n \"\"\"Find the indices of the minimium over the given axis ignoring nans.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),inf)\n return argmin(x,axis)\n \n\ndef nanmax(x,axis=-1):\n \"\"\"Find the maximum over the given axis ignoring nans.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),-inf)\n return amax(x,axis)\n\ndef nanargmax(x,axis=-1):\n \"\"\"Find the maximum over the given axis ignoring nans.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),-inf)\n return argmax(x,axis)\n\ndef disp(mesg, device=None, linefeed=1):\n \"\"\"Display a message to device (default is sys.stdout) with(out) linefeed.\n \"\"\"\n if device is None:\n import sys\n device = sys.stdout\n if linefeed:\n device.write('%s\\n' % mesg)\n else:\n device.write('%s' % mesg)\n device.flush()\n return\n\nclass vectorize:\n \"\"\"\n vectorize(somefunction) Generalized Function class.\n\n Description:\n \n Define a vectorized function which takes nested sequence\n objects or numerix arrays as inputs and returns a\n numerix array as output, evaluating the function over successive\n tuples of the input arrays like the python map function except it uses\n the broadcasting rules of numerix Python.\n\n Input:\n\n somefunction -- a Python function or method\n\n Example:\n\n def myfunc(a,b):\n if a > b:\n return a-b\n else\n return a+b\n\n vfunc = vectorize(myfunc)\n\n >>> vfunc([1,2,3,4],2)\n array([3,4,1,2])\n\n \"\"\"\n def __init__(self,pyfunc,otypes=None,doc=None):\n if not callable(pyfunc) or type(pyfunc) is 
types.ClassType:\n raise TypeError, \"Object is not a callable Python object.\"\n self.thefunc = pyfunc\n if doc is None:\n self.__doc__ = pyfunc.__doc__\n else:\n self.__doc__ = doc\n if otypes is None:\n self.otypes=''\n else:\n if isinstance(otypes,types.StringType):\n self.otypes=otypes\n else:\n raise ValueError, \"Output types must be a string.\"\n\n def __call__(self,*args):\n try:\n return squeeze(arraymap(self.thefunc,args,self.otypes))\n except IndexError:\n return self.zerocall(*args)\n\n def zerocall(self,*args):\n # one of the args was a zeros array\n # return zeros for each output\n # first --- find number of outputs\n # get it from self.otypes if possible\n # otherwise evaluate function at 0.9\n N = len(self.otypes)\n if N==1:\n return zeros((0,),'d')\n elif N !=0:\n return (zeros((0,),'d'),)*N\n newargs = []\n args = atleast_1d(args)\n for arg in args:\n if arg.typecode() != 'O':\n newargs.append(0.9)\n else:\n newargs.append(arg[0])\n newargs = tuple(newargs)\n try:\n res = self.thefunc(*newargs)\n except:\n raise ValueError, \"Zerocall is failing. 
\"\\\n \"Try using otypes in vectorize.\"\n if isscalar(res):\n return zeros((0,),'d')\n else:\n return (zeros((0,),'d'),)*len(res)\n\n", "source_code_before": "import types\nimport numerix as _nx\nfrom numerix import ravel, nonzero, array, choose, ones, zeros, \\\n sometrue, alltrue, reshape, alter_numeric, restore_numeric, arraymap, \\\n pi, _insert, multiply, add, arctan2, maximum, minimum\nfrom type_check import ScalarType, isscalar, asarray\nfrom shape_base import squeeze, atleast_1d\n\n__all__ = ['round','any','all','logspace','linspace','fix','mod',\n 'select','trim_zeros','amax','amin', 'alen', 'ptp','cumsum','take',\n 'copy', 'prod','cumprod','diff','angle','unwrap','sort_complex',\n 'disp','unique','extract','insert','nansum','nanmax','nanargmax',\n 'nanargmin','nanmin','sum','vectorize','asarray_chkfinite',\n 'alter_numeric', 'restore_numeric','isaltered']\n\ndef isaltered():\n val = str(type(_nx.array([1])))\n return 'scipy' in val\n\nround = _nx.around\n\ndef asarray_chkfinite(x):\n \"\"\"Like asarray except it checks to be sure no NaNs or Infs are present.\n \"\"\"\n x = asarray(x)\n if not all(_nx.isfinite(x)):\n raise ValueError, \"Array must not contain infs or nans.\"\n return x \n\ndef any(x):\n \"\"\"Return true if any elements of x are true: sometrue(ravel(x))\n \"\"\"\n return sometrue(ravel(x))\n\n\ndef all(x):\n \"\"\"Return true if all elements of x are true: alltrue(ravel(x))\n \"\"\"\n return alltrue(ravel(x))\n\n# Need this to change array type for low precision values\ndef sum(x,axis=0): # could change default axis here\n x = asarray(x)\n if x.typecode() in ['1','s','b','w']:\n x = x.astype('l')\n return _nx.sum(x,axis)\n \n\ndef logspace(start,stop,num=50,endpoint=1):\n \"\"\" Evenly spaced samples on a logarithmic scale.\n\n Return num evenly spaced samples from 10**start to 10**stop. 
If\n endpoint=1 then last sample is 10**stop.\n \"\"\"\n if num <= 0: return array([])\n if endpoint:\n step = (stop-start)/float((num-1))\n y = _nx.arange(0,num) * step + start\n else:\n step = (stop-start)/float(num)\n y = _nx.arange(0,num) * step + start\n return _nx.power(10.0,y)\n\ndef linspace(start,stop,num=50,endpoint=1,retstep=0):\n \"\"\" Evenly spaced samples.\n \n Return num evenly spaced samples from start to stop. If endpoint=1 then\n last sample is stop. If retstep is 1 then return the step value used.\n \"\"\"\n if num <= 0: return array([])\n if endpoint:\n step = (stop-start)/float((num-1))\n y = _nx.arange(0,num) * step + start \n else:\n step = (stop-start)/float(num)\n y = _nx.arange(0,num) * step + start\n if retstep:\n return y, step\n else:\n return y\n\ndef fix(x):\n \"\"\" Round x to nearest integer towards zero.\n \"\"\"\n x = asarray(x)\n y = _nx.floor(x)\n return _nx.where(x<0,y+1,y)\n\ndef mod(x,y):\n \"\"\" x - y*floor(x/y)\n \n For numeric arrays, x % y has the same sign as x while\n mod(x,y) has the same sign as y.\n \"\"\"\n return x - y*_nx.floor(x*1.0/y)\n\ndef select(condlist, choicelist, default=0):\n \"\"\" Returns an array comprised from different elements of choicelist\n depending on the list of conditions.\n\n condlist is a list of condition arrays containing ones or zeros\n \n choicelist is a list of choice matrices (of the \"same\" size as the\n arrays in condlist). The result array has the \"same\" size as the\n arrays in choicelist. If condlist is [c0,...,cN-1] then choicelist\n must be of length N. The elements of the choicelist can then be\n represented as [v0,...,vN-1]. The default choice if none of the\n conditions are met is given as the default argument. \n \n The conditions are tested in order and the first one statisfied is\n used to select the choice. 
In other words, the elements of the\n output array are found from the following tree (notice the order of\n the conditions matters):\n \n if c0: v0\n elif c1: v1\n elif c2: v2\n ...\n elif cN-1: vN-1\n else: default\n \n Note, that one of the condition arrays must be large enough to handle\n the largest array in the choice list.\n \"\"\"\n n = len(condlist)\n n2 = len(choicelist)\n if n2 != n:\n raise ValueError, \"List of cases, must be same length as the list of conditions.\"\n choicelist.insert(0,default) \n S = 0\n pfac = 1\n for k in range(1,n+1):\n S += k * pfac * asarray(condlist[k-1])\n if k < n:\n pfac *= (1-asarray(condlist[k-1]))\n # handle special case of a 1-element condition but\n # a multi-element choice\n if type(S) in ScalarType or max(asarray(S).shape)==1:\n pfac = asarray(1)\n for k in range(n2+1):\n pfac = pfac + asarray(choicelist[k]) \n S = S*ones(asarray(pfac).shape)\n return choose(S, tuple(choicelist))\n\ndef _asarray1d(arr):\n \"\"\"Ensure 1d array for one array.\n \"\"\"\n m = asarray(arr)\n if len(m.shape)==0:\n m = reshape(m,(1,))\n return m\n\ndef copy(a):\n \"\"\"Return an array copy of the object.\n \"\"\"\n return array(a,copy=1)\n\ndef take(a, indices, axis=0):\n \"\"\"Selects the elements in indices from array a along given axis.\n \"\"\"\n try:\n a = _nx.take(a,indices,axis)\n except ValueError: # a is scalar\n pass\n return a\n\ndef _no_axis_is_all(function, m, axis):\n if axis is None:\n m = ravel(m)\n axis = 0\n else:\n m = _asarray1d(m)\n if _nx.which[0] == \"numeric\":\n r = function(m, axis)\n else:\n import numarray as _na\n _na.Error.pushMode(overflow=\"raise\")\n try:\n r = function(m, axis)\n finally:\n _na.Error.popMode()\n return r\n \n# Basic operations\ndef amax(m,axis=-1): \n \"\"\"Returns the maximum of m along dimension axis. 
\n \"\"\"\n return _no_axis_is_all(maximum.reduce, m, axis)\n\ndef amin(m,axis=-1):\n \"\"\"Returns the minimum of m along dimension axis.\n \"\"\"\n return _no_axis_is_all(minimum.reduce, m, axis)\n\ndef alen(m):\n \"\"\"Returns the length of a Python object interpreted as an array\n \"\"\"\n return len(asarray(m))\n\n# Actually from Basis, but it fits in so naturally here...\n\ndef _amin_amax(m, axis):\n return amax(m,axis)-amin(m,axis)\n\ndef ptp(m,axis=-1):\n \"\"\"Returns the maximum - minimum along the the given dimension\n \"\"\"\n return _no_axis_is_all(_amin_amax, m, axis)\n\ndef cumsum(m,axis=-1):\n \"\"\"Returns the cumulative sum of the elements along the given axis\n \"\"\"\n return _no_axis_is_all(add.accumulate, m, axis)\n\ndef prod(m,axis=-1):\n \"\"\"Returns the product of the elements along the given axis\n \"\"\"\n return _no_axis_is_all(multiply.reduce, m, axis)\n\ndef cumprod(m,axis=-1):\n \"\"\"Returns the cumulative product of the elments along the given axis\n \"\"\"\n return _no_axis_is_all(multiply.accumulate, m, axis)\n\ndef diff(x, n=1,axis=-1):\n \"\"\"Calculates the nth order, discrete difference along given axis.\n \"\"\"\n if n==0:\n return x\n if n<0:\n raise ValueError,'Order must be non-negative but got ' + `n`\n x = _asarray1d(x)\n nd = len(x.shape)\n slice1 = [slice(None)]*nd\n slice2 = [slice(None)]*nd\n slice1[axis] = slice(1,None)\n slice2[axis] = slice(None,-1)\n if n > 1:\n return diff(x[slice1]-x[slice2], n-1, axis=axis)\n else:\n return x[slice1]-x[slice2]\n \ndef angle(z,deg=0):\n \"\"\"Return the angle of complex argument z.\"\"\"\n if deg:\n fact = 180/pi\n else:\n fact = 1.0\n z = asarray(z)\n if z.typecode() in ['D','F']:\n zimag = z.imag\n zreal = z.real\n else:\n zimag = 0\n zreal = z\n return arctan2(zimag,zreal) * fact\n\ndef unwrap(p,discont=pi,axis=-1):\n \"\"\"unwraps radian phase p by changing absolute jumps greater than\n discont to their 2*pi complement along the given axis.\n \"\"\"\n p = asarray(p)\n nd = 
len(p.shape)\n dd = diff(p,axis=axis)\n slice1 = [slice(None,None)]*nd # full slices\n slice1[axis] = slice(1,None)\n ddmod = mod(dd+pi,2*pi)-pi\n _nx.putmask(ddmod,(ddmod==-pi) & (dd > 0),pi)\n ph_correct = ddmod - dd;\n _nx.putmask(ph_correct,abs(dd)>> import scipy\n >>> a = array((0,0,0,1,2,3,2,1,0))\n >>> scipy.trim_zeros(a)\n array([1, 2, 3, 2, 1])\n \"\"\"\n first = 0\n if 'f' in trim or 'F' in trim:\n for i in filt:\n if i != 0.: break\n else: first = first + 1\n last = len(filt)\n if 'b' in trim or 'B' in trim:\n for i in filt[::-1]:\n if i != 0.: break\n else: last = last - 1\n return filt[first:last]\n\ndef unique(inseq):\n \"\"\"Returns unique items in 1-dimensional sequence.\n \"\"\"\n set = {}\n for item in inseq:\n set[item] = None\n return asarray(set.keys())\n\ndef where(condition,x=None,y=None):\n \"\"\"If x and y are both None, then return the (1-d equivalent) indices\n where condition is true. Otherwise, return an array shaped like\n condition with elements of x and y in the places where condition is\n true or false respectively.\n \"\"\"\n if (x is None) and (y is None):\n # Needs work for multidimensional arrays\n return nonzero(ravel(condition))\n else:\n return choose(not_equal(condition, 0), (y,x))\n \ndef extract(condition, arr):\n \"\"\"Elements of ravel(condition) where ravel(condition) is true (1-d)\n\n Equivalent of compress(ravel(condition), ravel(arr))\n \"\"\"\n return _nx.take(ravel(arr), nonzero(ravel(condition)))\n\ndef insert(arr, mask, vals):\n \"\"\"Similar to putmask arr[mask] = vals but 1d array vals has the\n same number of elements as the non-zero values of mask. 
Inverse of extract.\n \"\"\"\n return _nx._insert(arr, mask, vals)\n\ndef nansum(x,axis=-1):\n \"\"\"Sum the array over the given axis treating nans as missing values.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),0)\n return _nx.sum(x,axis)\n\ndef nanmin(x,axis=-1):\n \"\"\"Find the minimium over the given axis ignoring nans.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),inf)\n return amin(x,axis)\n\ndef nanargmin(x,axis=-1):\n \"\"\"Find the indices of the minimium over the given axis ignoring nans.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),inf)\n return argmin(x,axis)\n \n\ndef nanmax(x,axis=-1):\n \"\"\"Find the maximum over the given axis ignoring nans.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),-inf)\n return amax(x,axis)\n\ndef nanargmax(x,axis=-1):\n \"\"\"Find the maximum over the given axis ignoring nans.\n \"\"\"\n x = _asarray1d(x).copy()\n _nx.putmask(x,isnan(x),-inf)\n return argmax(x,axis)\n\ndef disp(mesg, device=None, linefeed=1):\n \"\"\"Display a message to device (default is sys.stdout) with(out) linefeed.\n \"\"\"\n if device is None:\n import sys\n device = sys.stdout\n if linefeed:\n device.write('%s\\n' % mesg)\n else:\n device.write('%s' % mesg)\n device.flush()\n return\n\nclass vectorize:\n \"\"\"\n vectorize(somefunction) Generalized Function class.\n\n Description:\n \n Define a vectorized function which takes nested sequence\n objects or numerix arrays as inputs and returns a\n numerix array as output, evaluating the function over successive\n tuples of the input arrays like the python map function except it uses\n the broadcasting rules of numerix Python.\n\n Input:\n\n somefunction -- a Python function or method\n\n Example:\n\n def myfunc(a,b):\n if a > b:\n return a-b\n else\n return a+b\n\n vfunc = vectorize(myfunc)\n\n >>> vfunc([1,2,3,4],2)\n array([3,4,1,2])\n\n \"\"\"\n def __init__(self,pyfunc,otypes=None,doc=None):\n if not callable(pyfunc) or type(pyfunc) is 
types.ClassType:\n raise TypeError, \"Object is not a callable Python object.\"\n self.thefunc = pyfunc\n if doc is None:\n self.__doc__ = pyfunc.__doc__\n else:\n self.__doc__ = doc\n if otypes is None:\n self.otypes=''\n else:\n if isinstance(otypes,types.StringType):\n self.otypes=otypes\n else:\n raise ValueError, \"Output types must be a string.\"\n\n def __call__(self,*args):\n try:\n return squeeze(arraymap(self.thefunc,args,self.otypes))\n except IndexError:\n return self.zerocall(*args)\n\n def zerocall(self,*args):\n # one of the args was a zeros array\n # return zeros for each output\n # first --- find number of outputs\n # get it from self.otypes if possible\n # otherwise evaluate function at 0.9\n N = len(self.otypes)\n if N==1:\n return zeros((0,),'d')\n elif N !=0:\n return (zeros((0,),'d'),)*N\n newargs = []\n args = atleast_1d(args)\n for arg in args:\n if arg.typecode() != 'O':\n newargs.append(0.9)\n else:\n newargs.append(arg[0])\n newargs = tuple(newargs)\n try:\n res = self.thefunc(*newargs)\n except:\n raise ValueError, \"Zerocall is failing. 
\"\\\n \"Try using otypes in vectorize.\"\n if isscalar(res):\n return zeros((0,),'d')\n else:\n return (zeros((0,),'d'),)*len(res)\n\n", "methods": [ { "name": "isaltered", "long_name": "isaltered( )", "filename": "function_base.py", "nloc": 3, "complexity": 1, "token_count": 24, "parameters": [], "start_line": 17, "end_line": 19, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "asarray_chkfinite", "long_name": "asarray_chkfinite( x )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 30, "parameters": [ "x" ], "start_line": 23, "end_line": 29, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "sum", "long_name": "sum( x , axis = 0 )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 49, "parameters": [ "x", "axis" ], "start_line": 32, "end_line": 36, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "logspace", "long_name": "logspace( start , stop , num = 50 , endpoint = 1 )", "filename": "function_base.py", "nloc": 9, "complexity": 3, "token_count": 99, "parameters": [ "start", "stop", "num", "endpoint" ], "start_line": 39, "end_line": 52, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 0 }, { "name": "linspace", "long_name": "linspace( start , stop , num = 50 , endpoint = 1 , retstep = 0 )", "filename": "function_base.py", "nloc": 12, "complexity": 4, "token_count": 103, "parameters": [ "start", "stop", "num", "endpoint", "retstep" ], "start_line": 54, "end_line": 70, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "fix", "long_name": "fix( x )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 35, "parameters": [ "x" ], "start_line": 72, "end_line": 77, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": 
"mod", "long_name": "mod( x , y )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 25, "parameters": [ "x", "y" ], "start_line": 79, "end_line": 85, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "select", "long_name": "select( condlist , choicelist , default = 0 )", "filename": "function_base.py", "nloc": 18, "complexity": 7, "token_count": 165, "parameters": [ "condlist", "choicelist", "default" ], "start_line": 87, "end_line": 133, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 47, "top_nesting_level": 0 }, { "name": "_asarray1d", "long_name": "_asarray1d( arr )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 35, "parameters": [ "arr" ], "start_line": 135, "end_line": 141, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "copy", "long_name": "copy( a )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 15, "parameters": [ "a" ], "start_line": 143, "end_line": 146, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "take", "long_name": "take( a , indices , axis = 0 )", "filename": "function_base.py", "nloc": 6, "complexity": 2, "token_count": 32, "parameters": [ "a", "indices", "axis" ], "start_line": 148, "end_line": 155, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "_no_axis_is_all", "long_name": "_no_axis_is_all( function , m , axis )", "filename": "function_base.py", "nloc": 16, "complexity": 4, "token_count": 86, "parameters": [ "function", "m", "axis" ], "start_line": 157, "end_line": 172, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 0 }, { "name": "amax", "long_name": "amax( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 175, 
"end_line": 178, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "amin", "long_name": "amin( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 180, "end_line": 183, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "alen", "long_name": "alen( m )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "m" ], "start_line": 185, "end_line": 188, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "_amin_amax", "long_name": "_amin_amax( m , axis )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 21, "parameters": [ "m", "axis" ], "start_line": 192, "end_line": 193, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 0 }, { "name": "ptp", "long_name": "ptp( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "m", "axis" ], "start_line": 195, "end_line": 198, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "cumsum", "long_name": "cumsum( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 200, "end_line": 203, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "prod", "long_name": "prod( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 205, "end_line": 208, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "cumprod", "long_name": "cumprod( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ 
"m", "axis" ], "start_line": 210, "end_line": 213, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "diff", "long_name": "diff( x , n = 1 , axis = - 1 )", "filename": "function_base.py", "nloc": 15, "complexity": 4, "token_count": 130, "parameters": [ "x", "n", "axis" ], "start_line": 215, "end_line": 231, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "angle", "long_name": "angle( z , deg = 0 )", "filename": "function_base.py", "nloc": 13, "complexity": 3, "token_count": 71, "parameters": [ "z", "deg" ], "start_line": 233, "end_line": 246, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 0 }, { "name": "unwrap", "long_name": "unwrap( p , discont = pi , axis = - 1 )", "filename": "function_base.py", "nloc": 13, "complexity": 1, "token_count": 150, "parameters": [ "p", "discont", "axis" ], "start_line": 248, "end_line": 263, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 0 }, { "name": "sort_complex.complex_cmp", "long_name": "sort_complex.complex_cmp( x , y )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 38, "parameters": [ "x", "y" ], "start_line": 269, "end_line": 273, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "sort_complex", "long_name": "sort_complex( a )", "filename": "function_base.py", "nloc": 6, "complexity": 1, "token_count": 44, "parameters": [ "a" ], "start_line": 265, "end_line": 276, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 }, { "name": "trim_zeros", "long_name": "trim_zeros( filt , trim = 'fb' )", "filename": "function_base.py", "nloc": 12, "complexity": 9, "token_count": 87, "parameters": [ "filt", "trim" ], "start_line": 278, "end_line": 297, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "unique", 
"long_name": "unique( inseq )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 30, "parameters": [ "inseq" ], "start_line": 299, "end_line": 305, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "where", "long_name": "where( condition , x = None , y = None )", "filename": "function_base.py", "nloc": 5, "complexity": 3, "token_count": 53, "parameters": [ "condition", "x", "y" ], "start_line": 307, "end_line": 317, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 }, { "name": "extract", "long_name": "extract( condition , arr )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 26, "parameters": [ "condition", "arr" ], "start_line": 319, "end_line": 324, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "insert", "long_name": "insert( arr , mask , vals )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 21, "parameters": [ "arr", "mask", "vals" ], "start_line": 326, "end_line": 330, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "nansum", "long_name": "nansum( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 43, "parameters": [ "x", "axis" ], "start_line": 332, "end_line": 337, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "nanmin", "long_name": "nanmin( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 41, "parameters": [ "x", "axis" ], "start_line": 339, "end_line": 344, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "nanargmin", "long_name": "nanargmin( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 41, "parameters": [ "x", "axis" ], "start_line": 346, "end_line": 351, 
"fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "nanmax", "long_name": "nanmax( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 42, "parameters": [ "x", "axis" ], "start_line": 354, "end_line": 359, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "nanargmax", "long_name": "nanargmax( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 42, "parameters": [ "x", "axis" ], "start_line": 361, "end_line": 366, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "disp", "long_name": "disp( mesg , device = None , linefeed = 1 )", "filename": "function_base.py", "nloc": 10, "complexity": 3, "token_count": 53, "parameters": [ "mesg", "device", "linefeed" ], "start_line": 368, "end_line": 379, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , pyfunc , otypes = None , doc = None )", "filename": "function_base.py", "nloc": 15, "complexity": 6, "token_count": 92, "parameters": [ "self", "pyfunc", "otypes", "doc" ], "start_line": 411, "end_line": 425, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 15, "top_nesting_level": 1 }, { "name": "__call__", "long_name": "__call__( self , * args )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "self", "args" ], "start_line": 427, "end_line": 431, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "zerocall", "long_name": "zerocall( self , * args )", "filename": "function_base.py", "nloc": 23, "complexity": 7, "token_count": 155, "parameters": [ "self", "args" ], "start_line": 433, "end_line": 460, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 28, "top_nesting_level": 1 } ], "methods_before": [ { 
"name": "isaltered", "long_name": "isaltered( )", "filename": "function_base.py", "nloc": 3, "complexity": 1, "token_count": 24, "parameters": [], "start_line": 16, "end_line": 18, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "asarray_chkfinite", "long_name": "asarray_chkfinite( x )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 30, "parameters": [ "x" ], "start_line": 22, "end_line": 28, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "any", "long_name": "any( x )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 30, "end_line": 33, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "all", "long_name": "all( x )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 36, "end_line": 39, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "sum", "long_name": "sum( x , axis = 0 )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 49, "parameters": [ "x", "axis" ], "start_line": 42, "end_line": 46, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "logspace", "long_name": "logspace( start , stop , num = 50 , endpoint = 1 )", "filename": "function_base.py", "nloc": 9, "complexity": 3, "token_count": 99, "parameters": [ "start", "stop", "num", "endpoint" ], "start_line": 49, "end_line": 62, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 0 }, { "name": "linspace", "long_name": "linspace( start , stop , num = 50 , endpoint = 1 , retstep = 0 )", "filename": "function_base.py", "nloc": 12, "complexity": 4, "token_count": 103, "parameters": [ "start", "stop", "num", "endpoint", "retstep" ], "start_line": 64, 
"end_line": 80, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "fix", "long_name": "fix( x )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 35, "parameters": [ "x" ], "start_line": 82, "end_line": 87, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "mod", "long_name": "mod( x , y )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 25, "parameters": [ "x", "y" ], "start_line": 89, "end_line": 95, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "select", "long_name": "select( condlist , choicelist , default = 0 )", "filename": "function_base.py", "nloc": 18, "complexity": 7, "token_count": 165, "parameters": [ "condlist", "choicelist", "default" ], "start_line": 97, "end_line": 143, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 47, "top_nesting_level": 0 }, { "name": "_asarray1d", "long_name": "_asarray1d( arr )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 35, "parameters": [ "arr" ], "start_line": 145, "end_line": 151, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "copy", "long_name": "copy( a )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 15, "parameters": [ "a" ], "start_line": 153, "end_line": 156, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "take", "long_name": "take( a , indices , axis = 0 )", "filename": "function_base.py", "nloc": 6, "complexity": 2, "token_count": 32, "parameters": [ "a", "indices", "axis" ], "start_line": 158, "end_line": 165, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "_no_axis_is_all", "long_name": "_no_axis_is_all( function , m , axis )", "filename": "function_base.py", "nloc": 16, "complexity": 4, 
"token_count": 86, "parameters": [ "function", "m", "axis" ], "start_line": 167, "end_line": 182, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 0 }, { "name": "amax", "long_name": "amax( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 185, "end_line": 188, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "amin", "long_name": "amin( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 190, "end_line": 193, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "alen", "long_name": "alen( m )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "m" ], "start_line": 195, "end_line": 198, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "_amin_amax", "long_name": "_amin_amax( m , axis )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 21, "parameters": [ "m", "axis" ], "start_line": 202, "end_line": 203, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 0 }, { "name": "ptp", "long_name": "ptp( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 20, "parameters": [ "m", "axis" ], "start_line": 205, "end_line": 208, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "cumsum", "long_name": "cumsum( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 210, "end_line": 213, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "prod", "long_name": "prod( m , axis = - 1 )", "filename": 
"function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 215, "end_line": 218, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "cumprod", "long_name": "cumprod( m , axis = - 1 )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 22, "parameters": [ "m", "axis" ], "start_line": 220, "end_line": 223, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "diff", "long_name": "diff( x , n = 1 , axis = - 1 )", "filename": "function_base.py", "nloc": 15, "complexity": 4, "token_count": 130, "parameters": [ "x", "n", "axis" ], "start_line": 225, "end_line": 241, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "angle", "long_name": "angle( z , deg = 0 )", "filename": "function_base.py", "nloc": 13, "complexity": 3, "token_count": 71, "parameters": [ "z", "deg" ], "start_line": 243, "end_line": 256, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 14, "top_nesting_level": 0 }, { "name": "unwrap", "long_name": "unwrap( p , discont = pi , axis = - 1 )", "filename": "function_base.py", "nloc": 13, "complexity": 1, "token_count": 150, "parameters": [ "p", "discont", "axis" ], "start_line": 258, "end_line": 273, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 16, "top_nesting_level": 0 }, { "name": "sort_complex.complex_cmp", "long_name": "sort_complex.complex_cmp( x , y )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 38, "parameters": [ "x", "y" ], "start_line": 279, "end_line": 283, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "sort_complex", "long_name": "sort_complex( a )", "filename": "function_base.py", "nloc": 6, "complexity": 1, "token_count": 44, "parameters": [ "a" ], "start_line": 275, "end_line": 286, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 12, "top_nesting_level": 0 }, { "name": "trim_zeros", "long_name": "trim_zeros( filt , trim = 'fb' )", "filename": "function_base.py", "nloc": 12, "complexity": 9, "token_count": 87, "parameters": [ "filt", "trim" ], "start_line": 288, "end_line": 307, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "unique", "long_name": "unique( inseq )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 30, "parameters": [ "inseq" ], "start_line": 309, "end_line": 315, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 0 }, { "name": "where", "long_name": "where( condition , x = None , y = None )", "filename": "function_base.py", "nloc": 5, "complexity": 3, "token_count": 53, "parameters": [ "condition", "x", "y" ], "start_line": 317, "end_line": 327, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 }, { "name": "extract", "long_name": "extract( condition , arr )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 26, "parameters": [ "condition", "arr" ], "start_line": 329, "end_line": 334, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "insert", "long_name": "insert( arr , mask , vals )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 21, "parameters": [ "arr", "mask", "vals" ], "start_line": 336, "end_line": 340, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "nansum", "long_name": "nansum( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 43, "parameters": [ "x", "axis" ], "start_line": 342, "end_line": 347, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "nanmin", "long_name": "nanmin( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, 
"complexity": 1, "token_count": 41, "parameters": [ "x", "axis" ], "start_line": 349, "end_line": 354, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "nanargmin", "long_name": "nanargmin( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 41, "parameters": [ "x", "axis" ], "start_line": 356, "end_line": 361, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "nanmax", "long_name": "nanmax( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 42, "parameters": [ "x", "axis" ], "start_line": 364, "end_line": 369, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "nanargmax", "long_name": "nanargmax( x , axis = - 1 )", "filename": "function_base.py", "nloc": 4, "complexity": 1, "token_count": 42, "parameters": [ "x", "axis" ], "start_line": 371, "end_line": 376, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "disp", "long_name": "disp( mesg , device = None , linefeed = 1 )", "filename": "function_base.py", "nloc": 10, "complexity": 3, "token_count": 53, "parameters": [ "mesg", "device", "linefeed" ], "start_line": 378, "end_line": 389, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , pyfunc , otypes = None , doc = None )", "filename": "function_base.py", "nloc": 15, "complexity": 6, "token_count": 92, "parameters": [ "self", "pyfunc", "otypes", "doc" ], "start_line": 421, "end_line": 435, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 15, "top_nesting_level": 1 }, { "name": "__call__", "long_name": "__call__( self , * args )", "filename": "function_base.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "self", "args" ], "start_line": 437, "end_line": 441, "fan_in": 0, 
"fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "zerocall", "long_name": "zerocall( self , * args )", "filename": "function_base.py", "nloc": 23, "complexity": 7, "token_count": 155, "parameters": [ "self", "args" ], "start_line": 443, "end_line": 470, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 28, "top_nesting_level": 1 } ], "changed_methods": [ { "name": "all", "long_name": "all( x )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 36, "end_line": 39, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "any", "long_name": "any( x )", "filename": "function_base.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 30, "end_line": 33, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 } ], "nloc": 298, "complexity": 88, "token_count": 2244, "diff_parsed": { "added": [ " pi, _insert, multiply, add, arctan2, maximum, minimum, any, all", "__all__ = ['round','logspace','linspace','fix','mod',", " 'select','trim_zeros','amax','amin', 'alen',", " 'ptp','cumsum','take', 'copy',", " 'prod','cumprod','diff','angle','unwrap','sort_complex'," ], "deleted": [ " pi, _insert, multiply, add, arctan2, maximum, minimum", "__all__ = ['round','any','all','logspace','linspace','fix','mod',", " 'select','trim_zeros','amax','amin', 'alen', 'ptp','cumsum','take',", " 'copy', 'prod','cumprod','diff','angle','unwrap','sort_complex',", "def any(x):", " \"\"\"Return true if any elements of x are true: sometrue(ravel(x))", " \"\"\"", " return sometrue(ravel(x))", "", "", "def all(x):", " \"\"\"Return true if all elements of x are true: alltrue(ravel(x))", " \"\"\"", " return alltrue(ravel(x))", "" ] } }, { "old_path": "scipy_base/machar.py", "new_path": "scipy_base/machar.py", "filename": "machar.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ 
-7,8 +7,7 @@\n \n __all__ = ['MachAr','machar_double','machar_single']\n \n-from numerix import array\n-from function_base import any\n+from numerix import array, any\n \n class MachAr:\n \"\"\"Diagnosing machine parameters.\n", "added_lines": 1, "deleted_lines": 2, "source_code": "#\n# Machine arithmetics - determine the parameters of the\n# floating-point arithmetic system\n#\n# Author: Pearu Peterson, September 2003\n#\n\n__all__ = ['MachAr','machar_double','machar_single']\n\nfrom numerix import array, any\n\nclass MachAr:\n \"\"\"Diagnosing machine parameters.\n\n The following attributes are available:\n\n ibeta - radix in which numbers are represented\n it - number of base-ibeta digits in the floating point mantissa M\n machep - exponent of the smallest (most negative) power of ibeta that,\n added to 1.0,\n gives something different from 1.0\n eps - floating-point number beta**machep (floating point precision)\n negep - exponent of the smallest power of ibeta that, substracted\n from 1.0, gives something different from 1.0\n epsneg - floating-point number beta**negep\n iexp - number of bits in the exponent (including its sign and bias)\n minexp - smallest (most negative) power of ibeta consistent with there\n being no leading zeros in the mantissa\n xmin - floating point number beta**minexp (the smallest (in\n magnitude) usable floating value)\n maxexp - smallest (positive) power of ibeta that causes overflow\n xmax - (1-epsneg)* beta**maxexp (the largest (in magnitude)\n usable floating value)\n irnd - in range(6), information on what kind of rounding is done\n in addition, and on how underflow is handled\n ngrd - number of 'guard digits' used when truncating the product\n of two mantissas to fit the representation\n\n epsilon - same as eps\n tiny - same as xmin\n huge - same as xmax\n precision - int(-log10(eps))\n resolution - 10**(-precision)\n\n Reference:\n Numerical Recipies.\n \"\"\"\n def __init__(self,\n float_conv=float,\n int_conv=int,\n 
float_to_float=float,\n float_to_str = lambda v:'%24.16e' % v,\n title = 'Python floating point number',\n ):\n \"\"\"\n float_conv - convert integer to float (array)\n int_conv - convert float (array) to integer\n float_to_float - convert float array to float\n float_to_str - convert array float to str\n title - description of used floating point numbers\n \"\"\"\n one = float_conv(1)\n two = one + one\n zero = one - one\n\n # Determine ibeta and beta\n a = one\n while 1:\n a = a + a\n temp = a + one\n temp1 = temp - a\n if any(temp1 - one != zero):\n break\n b = one\n while 1:\n b = b + b\n temp = a + b\n itemp = int_conv(temp-a)\n if any(itemp != 0):\n break\n ibeta = itemp\n beta = float_conv(ibeta)\n\n # Determine it and irnd\n it = 0\n b = one\n while 1:\n it = it + 1\n b = b * beta\n temp = b + one\n temp1 = temp - b\n if any(temp1 - one != zero):\n break\n\n betah = beta / two\n a = one\n while 1:\n a = a + a\n temp = a + one\n temp1 = temp - a\n if any(temp1 - one != zero):\n break\n temp = a + betah\n irnd = 0\n if any(temp-a != zero):\n irnd = 1\n tempa = a + beta\n temp = tempa + betah\n if irnd==0 and any(temp-tempa != zero):\n irnd = 2\n\n # Determine negep and epsneg\n negep = it + 3\n betain = one / beta\n a = one\n for i in range(negep):\n a = a * betain\n b = a\n while 1:\n temp = one - a\n if any(temp-one != zero):\n break\n a = a * beta\n negep = negep - 1\n negep = -negep\n epsneg = a\n\n # Determine machep and eps\n machep = - it - 3\n a = b\n\n while 1:\n temp = one + a\n if any(temp-one != zero):\n break\n a = a * beta\n machep = machep + 1\n eps = a\n\n # Determine ngrd\n ngrd = 0\n temp = one + eps\n if irnd==0 and any(temp*one - one != zero):\n ngrd = 1\n\n # Determine iexp\n i = 0\n k = 1\n z = betain\n t = one + eps\n nxres = 0\n while 1:\n y = z\n z = y*y\n a = z*one # Check here for underflow\n temp = z*t\n if any(a+a == zero) or any(abs(z)>=y):\n break\n temp1 = temp * betain\n if any(temp1*beta == z):\n break\n i = i + 1\n k = k + 
k\n if ibeta != 10:\n iexp = i + 1\n mx = k + k\n else:\n iexp = 2\n iz = ibeta\n while k >= iz:\n iz = iz * ibeta\n iexp = iexp + 1\n mx = iz + iz - 1\n\n # Determine minexp and xmin\n while 1:\n xmin = y\n y = y * betain\n a = y * one\n temp = y * t\n if any(a+a != zero) and any(abs(y) < xmin):\n k = k + 1\n temp1 = temp * betain\n if any(temp1*beta == y) and any(temp != y):\n nxres = 3\n xmin = y\n break\n else:\n break\n minexp = -k\n\n # Determine maxexp, xmax\n if mx <= k + k - 3 and ibeta != 10:\n mx = mx + mx\n iexp = iexp + 1\n maxexp = mx + minexp\n irnd = irnd + nxres\n if irnd >= 2:\n maxexp = maxexp - 2\n i = maxexp + minexp\n if ibeta == 2 and not i:\n maxexp = maxexp - 1\n if i > 20:\n maxexp = maxexp - 1\n if any(a != y):\n maxexp = maxexp - 2\n xmax = one - epsneg\n if any(xmax*one != xmax):\n xmax = one - beta*epsneg\n xmax = xmax / (xmin*beta*beta*beta)\n i = maxexp + minexp + 3\n for j in range(i):\n if ibeta==2:\n xmax = xmax + xmax\n else:\n xmax = xmax * beta\n\n self.ibeta = ibeta\n self.it = it\n self.negep = negep\n self.epsneg = float_to_float(epsneg)\n self._str_epsneg = float_to_str(epsneg)\n self.machep = machep\n self.eps = float_to_float(eps)\n self._str_eps = float_to_str(eps)\n self.ngrd = ngrd\n self.iexp = iexp\n self.minexp = minexp\n self.xmin = float_to_float(xmin)\n self._str_xmin = float_to_str(xmin)\n self.maxexp = maxexp\n self.xmax = float_to_float(xmax)\n self._str_xmax = float_to_str(xmax)\n self.irnd = irnd\n\n self.title = title\n # Commonly used parameters\n self.epsilon = self.eps\n self.tiny = self.xmin\n self.huge = self.xmax\n\n import math\n self.precision = int(-math.log10(float_to_float(self.eps)))\n ten = two + two + two + two + two\n resolution = ten ** (-self.precision)\n self.resolution = float_to_float(resolution)\n\n def __str__(self):\n return '''\\\nMachine parameters for %(title)s\n---------------------------------------------------------------------\nibeta=%(ibeta)s it=%(it)s iexp=%(iexp)s 
ngrd=%(ngrd)s irnd=%(irnd)s\nmachep=%(machep)s eps=%(_str_eps)s (beta**machep == epsilon)\nnegep =%(negep)s epsneg=%(_str_epsneg)s (beta**epsneg)\nminexp=%(minexp)s xmin=%(_str_xmin)s (beta**minexp == tiny)\nmaxexp=%(maxexp)s xmax=%(_str_xmax)s ((1-epsneg)*beta**maxexp == huge)\n---------------------------------------------------------------------\n''' % self.__dict__\n\ndef frz(a):\n \"\"\"fix rank-0 --> rank-1\"\"\"\n if len(a.shape) == 0:\n a = a.copy()\n a.shape = (1,)\n return a\n\nmachar_double = MachAr(lambda v:array([v],'d'),\n lambda v:frz(v.astype('i'))[0],\n lambda v:array(frz(v)[0],'d'),\n lambda v:'%24.16e' % array(frz(v)[0],'d'),\n 'numerix double precision floating point number')\n\nmachar_single = MachAr(lambda v:array([v],'f'),\n lambda v:frz(v.astype('i'))[0],\n lambda v:array(frz(v)[0],'f'), #\n lambda v:'%15.7e' % array(frz(v)[0],'f'),\n 'numerix single precision floating point number')\n\nif __name__ == '__main__':\n print MachAr()\n print machar_double\n print machar_single\n", "source_code_before": "#\n# Machine arithmetics - determine the parameters of the\n# floating-point arithmetic system\n#\n# Author: Pearu Peterson, September 2003\n#\n\n__all__ = ['MachAr','machar_double','machar_single']\n\nfrom numerix import array\nfrom function_base import any\n\nclass MachAr:\n \"\"\"Diagnosing machine parameters.\n\n The following attributes are available:\n\n ibeta - radix in which numbers are represented\n it - number of base-ibeta digits in the floating point mantissa M\n machep - exponent of the smallest (most negative) power of ibeta that,\n added to 1.0,\n gives something different from 1.0\n eps - floating-point number beta**machep (floating point precision)\n negep - exponent of the smallest power of ibeta that, substracted\n from 1.0, gives something different from 1.0\n epsneg - floating-point number beta**negep\n iexp - number of bits in the exponent (including its sign and bias)\n minexp - smallest (most negative) power of ibeta 
consistent with there\n being no leading zeros in the mantissa\n xmin - floating point number beta**minexp (the smallest (in\n magnitude) usable floating value)\n maxexp - smallest (positive) power of ibeta that causes overflow\n xmax - (1-epsneg)* beta**maxexp (the largest (in magnitude)\n usable floating value)\n irnd - in range(6), information on what kind of rounding is done\n in addition, and on how underflow is handled\n ngrd - number of 'guard digits' used when truncating the product\n of two mantissas to fit the representation\n\n epsilon - same as eps\n tiny - same as xmin\n huge - same as xmax\n precision - int(-log10(eps))\n resolution - 10**(-precision)\n\n Reference:\n Numerical Recipies.\n \"\"\"\n def __init__(self,\n float_conv=float,\n int_conv=int,\n float_to_float=float,\n float_to_str = lambda v:'%24.16e' % v,\n title = 'Python floating point number',\n ):\n \"\"\"\n float_conv - convert integer to float (array)\n int_conv - convert float (array) to integer\n float_to_float - convert float array to float\n float_to_str - convert array float to str\n title - description of used floating point numbers\n \"\"\"\n one = float_conv(1)\n two = one + one\n zero = one - one\n\n # Determine ibeta and beta\n a = one\n while 1:\n a = a + a\n temp = a + one\n temp1 = temp - a\n if any(temp1 - one != zero):\n break\n b = one\n while 1:\n b = b + b\n temp = a + b\n itemp = int_conv(temp-a)\n if any(itemp != 0):\n break\n ibeta = itemp\n beta = float_conv(ibeta)\n\n # Determine it and irnd\n it = 0\n b = one\n while 1:\n it = it + 1\n b = b * beta\n temp = b + one\n temp1 = temp - b\n if any(temp1 - one != zero):\n break\n\n betah = beta / two\n a = one\n while 1:\n a = a + a\n temp = a + one\n temp1 = temp - a\n if any(temp1 - one != zero):\n break\n temp = a + betah\n irnd = 0\n if any(temp-a != zero):\n irnd = 1\n tempa = a + beta\n temp = tempa + betah\n if irnd==0 and any(temp-tempa != zero):\n irnd = 2\n\n # Determine negep and epsneg\n negep = it + 3\n 
betain = one / beta\n a = one\n for i in range(negep):\n a = a * betain\n b = a\n while 1:\n temp = one - a\n if any(temp-one != zero):\n break\n a = a * beta\n negep = negep - 1\n negep = -negep\n epsneg = a\n\n # Determine machep and eps\n machep = - it - 3\n a = b\n\n while 1:\n temp = one + a\n if any(temp-one != zero):\n break\n a = a * beta\n machep = machep + 1\n eps = a\n\n # Determine ngrd\n ngrd = 0\n temp = one + eps\n if irnd==0 and any(temp*one - one != zero):\n ngrd = 1\n\n # Determine iexp\n i = 0\n k = 1\n z = betain\n t = one + eps\n nxres = 0\n while 1:\n y = z\n z = y*y\n a = z*one # Check here for underflow\n temp = z*t\n if any(a+a == zero) or any(abs(z)>=y):\n break\n temp1 = temp * betain\n if any(temp1*beta == z):\n break\n i = i + 1\n k = k + k\n if ibeta != 10:\n iexp = i + 1\n mx = k + k\n else:\n iexp = 2\n iz = ibeta\n while k >= iz:\n iz = iz * ibeta\n iexp = iexp + 1\n mx = iz + iz - 1\n\n # Determine minexp and xmin\n while 1:\n xmin = y\n y = y * betain\n a = y * one\n temp = y * t\n if any(a+a != zero) and any(abs(y) < xmin):\n k = k + 1\n temp1 = temp * betain\n if any(temp1*beta == y) and any(temp != y):\n nxres = 3\n xmin = y\n break\n else:\n break\n minexp = -k\n\n # Determine maxexp, xmax\n if mx <= k + k - 3 and ibeta != 10:\n mx = mx + mx\n iexp = iexp + 1\n maxexp = mx + minexp\n irnd = irnd + nxres\n if irnd >= 2:\n maxexp = maxexp - 2\n i = maxexp + minexp\n if ibeta == 2 and not i:\n maxexp = maxexp - 1\n if i > 20:\n maxexp = maxexp - 1\n if any(a != y):\n maxexp = maxexp - 2\n xmax = one - epsneg\n if any(xmax*one != xmax):\n xmax = one - beta*epsneg\n xmax = xmax / (xmin*beta*beta*beta)\n i = maxexp + minexp + 3\n for j in range(i):\n if ibeta==2:\n xmax = xmax + xmax\n else:\n xmax = xmax * beta\n\n self.ibeta = ibeta\n self.it = it\n self.negep = negep\n self.epsneg = float_to_float(epsneg)\n self._str_epsneg = float_to_str(epsneg)\n self.machep = machep\n self.eps = float_to_float(eps)\n self._str_eps = 
float_to_str(eps)\n self.ngrd = ngrd\n self.iexp = iexp\n self.minexp = minexp\n self.xmin = float_to_float(xmin)\n self._str_xmin = float_to_str(xmin)\n self.maxexp = maxexp\n self.xmax = float_to_float(xmax)\n self._str_xmax = float_to_str(xmax)\n self.irnd = irnd\n\n self.title = title\n # Commonly used parameters\n self.epsilon = self.eps\n self.tiny = self.xmin\n self.huge = self.xmax\n\n import math\n self.precision = int(-math.log10(float_to_float(self.eps)))\n ten = two + two + two + two + two\n resolution = ten ** (-self.precision)\n self.resolution = float_to_float(resolution)\n\n def __str__(self):\n return '''\\\nMachine parameters for %(title)s\n---------------------------------------------------------------------\nibeta=%(ibeta)s it=%(it)s iexp=%(iexp)s ngrd=%(ngrd)s irnd=%(irnd)s\nmachep=%(machep)s eps=%(_str_eps)s (beta**machep == epsilon)\nnegep =%(negep)s epsneg=%(_str_epsneg)s (beta**epsneg)\nminexp=%(minexp)s xmin=%(_str_xmin)s (beta**minexp == tiny)\nmaxexp=%(maxexp)s xmax=%(_str_xmax)s ((1-epsneg)*beta**maxexp == huge)\n---------------------------------------------------------------------\n''' % self.__dict__\n\ndef frz(a):\n \"\"\"fix rank-0 --> rank-1\"\"\"\n if len(a.shape) == 0:\n a = a.copy()\n a.shape = (1,)\n return a\n\nmachar_double = MachAr(lambda v:array([v],'d'),\n lambda v:frz(v.astype('i'))[0],\n lambda v:array(frz(v)[0],'d'),\n lambda v:'%24.16e' % array(frz(v)[0],'d'),\n 'numerix double precision floating point number')\n\nmachar_single = MachAr(lambda v:array([v],'f'),\n lambda v:frz(v.astype('i'))[0],\n lambda v:array(frz(v)[0],'f'), #\n lambda v:'%15.7e' % array(frz(v)[0],'f'),\n 'numerix single precision floating point number')\n\nif __name__ == '__main__':\n print MachAr()\n print machar_double\n print machar_single\n", "methods": [ { "name": "__init__", "long_name": "__init__( self , float_conv = float , int_conv = int , float_to_float = float , float_to_str = lambda v : '%24.16e' % v , title = 'Python floating point 
number' , )", "filename": "machar.py", "nloc": 170, "complexity": 40, "token_count": 928, "parameters": [ "self", "float_conv", "int_conv", "float_to_float", "float_to_str", "title" ], "start_line": 48, "end_line": 246, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 199, "top_nesting_level": 1 }, { "name": "__str__", "long_name": "__str__( self )", "filename": "machar.py", "nloc": 11, "complexity": 1, "token_count": 11, "parameters": [ "self" ], "start_line": 248, "end_line": 258, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "frz", "long_name": "frz( a )", "filename": "machar.py", "nloc": 5, "complexity": 2, "token_count": 33, "parameters": [ "a" ], "start_line": 260, "end_line": 265, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 } ], "methods_before": [ { "name": "__init__", "long_name": "__init__( self , float_conv = float , int_conv = int , float_to_float = float , float_to_str = lambda v : '%24.16e' % v , title = 'Python floating point number' , )", "filename": "machar.py", "nloc": 170, "complexity": 40, "token_count": 928, "parameters": [ "self", "float_conv", "int_conv", "float_to_float", "float_to_str", "title" ], "start_line": 49, "end_line": 247, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 199, "top_nesting_level": 1 }, { "name": "__str__", "long_name": "__str__( self )", "filename": "machar.py", "nloc": 11, "complexity": 1, "token_count": 11, "parameters": [ "self" ], "start_line": 249, "end_line": 259, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "frz", "long_name": "frz( a )", "filename": "machar.py", "nloc": 5, "complexity": 2, "token_count": 33, "parameters": [ "a" ], "start_line": 261, "end_line": 266, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 } ], "changed_methods": [], "nloc": 238, "complexity": 43, "token_count": 1143, "diff_parsed": 
{ "added": [ "from numerix import array, any" ], "deleted": [ "from numerix import array", "from function_base import any" ] } }, { "old_path": "scipy_base/numerix.py", "new_path": "scipy_base/numerix.py", "filename": "numerix.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -30,6 +30,7 @@\n if a in [\"--Numeric\", \"--numeric\", \"--NUMERIC\",\n \"--Numarray\", \"--numarray\", \"--NUMARRAY\"]:\n which = a[2:], \"command line\"\n+ sys.argv.remove(a)\n break\n del a\n \n@@ -54,16 +55,16 @@\n if which[0] == \"numarray\":\n from _na_imports import *\n import numarray\n- version = 'numarray %s'%numarray.__version__\n-\n elif which[0] == \"numeric\":\n from _nc_imports import *\n import Numeric\n- version = 'Numeric %s'%Numeric.__version__\n else:\n raise RuntimeError(\"invalid numerix selector\")\n \n-print 'numerix %s'%version\n+# Tweak the environment for f2py/scipy_distutils, e.g. setenv NUMERIC 1\n+os.environ[ which[0].upper() ] = \"1\" \n+\n+print 'numerix %s'% NX_VERSION\n \n # ---------------------------------------------------------------\n # Common imports and fixes\n@@ -72,7 +73,16 @@\n # a bug fix for blas numeric suggested by Fernando Perez\n matrixmultiply=dot\n \n-from function_base import any, all\n+def any(x):\n+ \"\"\"Return true if any elements of x are true: sometrue(ravel(x))\n+ \"\"\"\n+ return sometrue(ravel(x))\n+\n+\n+def all(x):\n+ \"\"\"Return true if all elements of x are true: alltrue(ravel(x))\n+ \"\"\"\n+ return alltrue(ravel(x))\n \n def _import_fail_message(module, version):\n \"\"\"Prints a message when the array package specific version of an extension\n", "added_lines": 15, "deleted_lines": 5, "source_code": "\"\"\"numerix imports either Numeric or numarray based on various selectors.\n\n0. If the value \"--numarray\" or \"--Numeric\" is specified on the\ncommand line, then numerix imports the specified array package.\n\n1. If the environment variable NUMERIX exists, it's value is used to\nchoose Numeric or numarray.\n\n2. 
The value of numerix in ~/.matplotlibrc: either Numeric or numarray\n\n\n3. If none of the above is done, the default array package is Numeric.\nBecause the .matplotlibrc always provides *some* value for numerix (it\nhas it's own system of default values), this default is most likely\nnever used.\n\nTo summarize: the commandline is examined first, the rc file second,\nand the default array package is Numeric. \n\"\"\"\n\nimport sys, os\n# from matplotlib import rcParams, verbose\n\nwhich = None, None\n\n# First, see if --numarray or --Numeric was specified on the command\n# line:\nif hasattr(sys, 'argv'): #Once again, Apache mod_python has no argv\n for a in sys.argv:\n if a in [\"--Numeric\", \"--numeric\", \"--NUMERIC\",\n \"--Numarray\", \"--numarray\", \"--NUMARRAY\"]:\n which = a[2:], \"command line\"\n sys.argv.remove(a)\n break\n del a\n\nif os.getenv(\"NUMERIX\"):\n which = os.getenv(\"NUMERIX\"), \"environment var\"\n\n# if which[0] is None: \n# try: # In theory, rcParams always has *some* value for numerix.\n# which = rcParams['numerix'], \"rc\"\n# except KeyError:\n# pass\n\n# If all the above fail, default to Numeric.\nif which[0] is None:\n which = \"numeric\", \"defaulted\"\n\nwhich = which[0].strip().lower(), which[1]\nif which[0] not in [\"numeric\", \"numarray\"]:\n verbose.report_error(__doc__)\n raise ValueError(\"numerix selector must be either 'Numeric' or 'numarray' but the value obtained from the %s was '%s'.\" % (which[1], which[0]))\n\nif which[0] == \"numarray\":\n from _na_imports import *\n import numarray\nelif which[0] == \"numeric\":\n from _nc_imports import *\n import Numeric\nelse:\n raise RuntimeError(\"invalid numerix selector\")\n\n# Tweak the environment for f2py/scipy_distutils, e.g. 
setenv NUMERIC 1\nos.environ[ which[0].upper() ] = \"1\" \n\nprint 'numerix %s'% NX_VERSION\n\n# ---------------------------------------------------------------\n# Common imports and fixes\n# ---------------------------------------------------------------\n\n# a bug fix for blas numeric suggested by Fernando Perez\nmatrixmultiply=dot\n\ndef any(x):\n \"\"\"Return true if any elements of x are true: sometrue(ravel(x))\n \"\"\"\n return sometrue(ravel(x))\n\n\ndef all(x):\n \"\"\"Return true if all elements of x are true: alltrue(ravel(x))\n \"\"\"\n return alltrue(ravel(x))\n\ndef _import_fail_message(module, version):\n \"\"\"Prints a message when the array package specific version of an extension\n fails to import correctly.\n \"\"\"\n _dict = { \"which\" : which[0],\n \"module\" : module,\n \"specific\" : version + module\n }\n print \"\"\"\\nThe import of the %(which)s version of the %(module)s module, %(specific)s, failed.\\nThis is either because %(which)s was unavailable when scipy was compiled,\\nor because a dependency of %(specific)s could not be satisfied.\\nIf it appears that %(specific)s was not built, make sure you have a working copy of\\n%(which)s and then re-install scipy. Otherwise, the following traceback gives more details:\\n\"\"\" % _dict\n", "source_code_before": "\"\"\"numerix imports either Numeric or numarray based on various selectors.\n\n0. If the value \"--numarray\" or \"--Numeric\" is specified on the\ncommand line, then numerix imports the specified array package.\n\n1. If the environment variable NUMERIX exists, it's value is used to\nchoose Numeric or numarray.\n\n2. The value of numerix in ~/.matplotlibrc: either Numeric or numarray\n\n\n3. 
If none of the above is done, the default array package is Numeric.\nBecause the .matplotlibrc always provides *some* value for numerix (it\nhas it's own system of default values), this default is most likely\nnever used.\n\nTo summarize: the commandline is examined first, the rc file second,\nand the default array package is Numeric. \n\"\"\"\n\nimport sys, os\n# from matplotlib import rcParams, verbose\n\nwhich = None, None\n\n# First, see if --numarray or --Numeric was specified on the command\n# line:\nif hasattr(sys, 'argv'): #Once again, Apache mod_python has no argv\n for a in sys.argv:\n if a in [\"--Numeric\", \"--numeric\", \"--NUMERIC\",\n \"--Numarray\", \"--numarray\", \"--NUMARRAY\"]:\n which = a[2:], \"command line\"\n break\n del a\n\nif os.getenv(\"NUMERIX\"):\n which = os.getenv(\"NUMERIX\"), \"environment var\"\n\n# if which[0] is None: \n# try: # In theory, rcParams always has *some* value for numerix.\n# which = rcParams['numerix'], \"rc\"\n# except KeyError:\n# pass\n\n# If all the above fail, default to Numeric.\nif which[0] is None:\n which = \"numeric\", \"defaulted\"\n\nwhich = which[0].strip().lower(), which[1]\nif which[0] not in [\"numeric\", \"numarray\"]:\n verbose.report_error(__doc__)\n raise ValueError(\"numerix selector must be either 'Numeric' or 'numarray' but the value obtained from the %s was '%s'.\" % (which[1], which[0]))\n\nif which[0] == \"numarray\":\n from _na_imports import *\n import numarray\n version = 'numarray %s'%numarray.__version__\n\nelif which[0] == \"numeric\":\n from _nc_imports import *\n import Numeric\n version = 'Numeric %s'%Numeric.__version__\nelse:\n raise RuntimeError(\"invalid numerix selector\")\n\nprint 'numerix %s'%version\n\n# ---------------------------------------------------------------\n# Common imports and fixes\n# ---------------------------------------------------------------\n\n# a bug fix for blas numeric suggested by Fernando Perez\nmatrixmultiply=dot\n\nfrom function_base import any, 
all\n\ndef _import_fail_message(module, version):\n \"\"\"Prints a message when the array package specific version of an extension\n fails to import correctly.\n \"\"\"\n _dict = { \"which\" : which[0],\n \"module\" : module,\n \"specific\" : version + module\n }\n print \"\"\"\\nThe import of the %(which)s version of the %(module)s module, %(specific)s, failed.\\nThis is either because %(which)s was unavailable when scipy was compiled,\\nor because a dependency of %(specific)s could not be satisfied.\\nIf it appears that %(specific)s was not built, make sure you have a working copy of\\n%(which)s and then re-install scipy. Otherwise, the following traceback gives more details:\\n\"\"\" % _dict\n", "methods": [ { "name": "any", "long_name": "any( x )", "filename": "numerix.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 76, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "all", "long_name": "all( x )", "filename": "numerix.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 82, "end_line": 85, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "_import_fail_message", "long_name": "_import_fail_message( module , version )", "filename": "numerix.py", "nloc": 6, "complexity": 1, "token_count": 32, "parameters": [ "module", "version" ], "start_line": 87, "end_line": 95, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 0 } ], "methods_before": [ { "name": "_import_fail_message", "long_name": "_import_fail_message( module , version )", "filename": "numerix.py", "nloc": 6, "complexity": 1, "token_count": 32, "parameters": [ "module", "version" ], "start_line": 77, "end_line": 85, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "all", "long_name": "all( x )", "filename": 
"numerix.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 82, "end_line": 85, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "any", "long_name": "any( x )", "filename": "numerix.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 76, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 } ], "nloc": 58, "complexity": 3, "token_count": 268, "diff_parsed": { "added": [ " sys.argv.remove(a)", "# Tweak the environment for f2py/scipy_distutils, e.g. setenv NUMERIC 1", "os.environ[ which[0].upper() ] = \"1\"", "", "print 'numerix %s'% NX_VERSION", "def any(x):", " \"\"\"Return true if any elements of x are true: sometrue(ravel(x))", " \"\"\"", " return sometrue(ravel(x))", "", "", "def all(x):", " \"\"\"Return true if all elements of x are true: alltrue(ravel(x))", " \"\"\"", " return alltrue(ravel(x))" ], "deleted": [ " version = 'numarray %s'%numarray.__version__", "", " version = 'Numeric %s'%Numeric.__version__", "print 'numerix %s'%version", "from function_base import any, all" ] } }, { "old_path": "scipy_base/polynomial.py", "new_path": "scipy_base/polynomial.py", "filename": "polynomial.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -19,13 +19,7 @@ def get_eigval_func():\n import linalg\n eigvals = linalg.eigvals\n except ImportError:\n- try:\n- import LinearAlgebra\n- eigvals = LinearAlgebra.eigenvalues\n- except:\n- raise ImportError, \\\n- \"You must have scipy.linalg or LinearAlgebra to \"\\\n- \"use this function.\"\n+ from numerix import eigenvalues as eigvals\n return eigvals\n \n def poly(seq_of_zeros):\n", "added_lines": 1, "deleted_lines": 7, "source_code": "import numerix as _nx\nfrom numerix import *\nfrom scimath import *\n\nfrom type_check import isscalar, asarray\nfrom matrix_base import diag\nfrom shape_base import hstack, atleast_1d\nfrom function_base import 
trim_zeros, sort_complex\n\n__all__ = ['poly','roots','polyint','polyder','polyadd','polysub','polymul',\n 'polydiv','polyval','poly1d']\n \ndef get_eigval_func():\n try:\n import scipy.linalg\n eigvals = scipy.linalg.eigvals\n except ImportError:\n try:\n import linalg\n eigvals = linalg.eigvals\n except ImportError:\n from numerix import eigenvalues as eigvals\n return eigvals\n\ndef poly(seq_of_zeros):\n \"\"\" Return a sequence representing a polynomial given a sequence of roots.\n\n If the input is a matrix, return the characteristic polynomial.\n \n Example:\n \n >>> b = roots([1,3,1,5,6])\n >>> poly(b)\n array([1., 3., 1., 5., 6.])\n \"\"\"\n seq_of_zeros = atleast_1d(seq_of_zeros) \n sh = shape(seq_of_zeros)\n if len(sh) == 2 and sh[0] == sh[1]:\n eig = get_eigval_func()\n seq_of_zeros=eig(seq_of_zeros)\n elif len(sh) ==1:\n pass\n else:\n raise ValueError, \"input must be 1d or square 2d array.\"\n\n if len(seq_of_zeros) == 0:\n return 1.0\n\n a = [1]\n for k in range(len(seq_of_zeros)):\n a = convolve(a,[1, -seq_of_zeros[k]], mode=2)\n \n if a.typecode() in ['F','D']:\n # if complex roots are all complex conjugates, the roots are real.\n roots = asarray(seq_of_zeros,'D')\n pos_roots = sort_complex(compress(roots.imag > 0,roots))\n neg_roots = conjugate(sort_complex(compress(roots.imag < 0,roots)))\n if (len(pos_roots) == len(neg_roots) and\n alltrue(neg_roots == pos_roots)):\n a = a.real.copy()\n\n return a\n\ndef roots(p):\n \"\"\" Return the roots of the polynomial coefficients in p.\n\n The values in the rank-1 array p are coefficients of a polynomial.\n If the length of p is n+1 then the polynomial is\n p[0] * x**n + p[1] * x**(n-1) + ... 
+ p[n-1]*x + p[n]\n \"\"\"\n # If input is scalar, this makes it an array\n eig = get_eigval_func()\n p = atleast_1d(p)\n if len(p.shape) != 1:\n raise ValueError,\"Input must be a rank-1 array.\"\n \n # find non-zero array entries\n non_zero = nonzero(ravel(p))\n\n # find the number of trailing zeros -- this is the number of roots at 0.\n trailing_zeros = len(p) - non_zero[-1] - 1\n\n # strip leading and trailing zeros\n p = p[int(non_zero[0]):int(non_zero[-1])+1]\n \n # casting: if incoming array isn't floating point, make it floating point.\n if p.typecode() not in ['f','d','F','D']:\n p = p.astype('d')\n\n N = len(p)\n if N > 1:\n # build companion matrix and find its eigenvalues (the roots)\n A = diag(ones((N-2,),p.typecode()),-1)\n A[0,:] = -p[1:] / p[0]\n roots = eig(A)\n else:\n return array([])\n\n # tack any zeros onto the back of the array \n roots = hstack((roots,zeros(trailing_zeros,roots.typecode())))\n return roots\n\ndef polyint(p,m=1,k=None):\n \"\"\"Return the mth analytical integral of the polynomial p.\n\n If k is None, then zero-valued constants of integration are used.\n otherwise, k should be a list of length m (or a scalar if m=1) to\n represent the constants of integration to use for each integration\n (starting with k[0])\n \"\"\"\n m = int(m)\n if m < 0:\n raise ValueError, \"Order of integral must be positive (see polyder)\"\n if k is None:\n k = _nx.zeros(m)\n k = atleast_1d(k)\n if len(k) == 1 and m > 1:\n k = k[0]*_nx.ones(m)\n if len(k) < m:\n raise ValueError, \\\n \"k must be a scalar or a rank-1 array of length 1 or >m.\"\n if m == 0:\n return p\n else:\n truepoly = isinstance(p,poly1d)\n p = asarray(p)\n y = _nx.zeros(len(p)+1,'d')\n y[:-1] = p*1.0/_nx.arange(len(p),0,-1)\n y[-1] = k[0] \n val = polyint(y,m-1,k=k[1:])\n if truepoly:\n val = poly1d(val)\n return val\n \ndef polyder(p,m=1):\n \"\"\"Return the mth derivative of the polynomial p.\n \"\"\"\n m = int(m)\n truepoly = isinstance(p,poly1d)\n p = asarray(p)\n n = 
len(p)-1\n y = p[:-1] * _nx.arange(n,0,-1)\n if m < 0:\n raise ValueError, \"Order of derivative must be positive (see polyint)\"\n if m == 0:\n return p\n else:\n val = polyder(y,m-1)\n if truepoly:\n val = poly1d(val)\n return val\n\ndef polyval(p,x):\n \"\"\"Evaluate the polynomial p at x. If x is a polynomial then composition.\n\n Description:\n\n If p is of length N, this function returns the value:\n p[0]*(x**N-1) + p[1]*(x**N-2) + ... + p[N-2]*x + p[N-1]\n\n x can be a sequence and p(x) will be returned for all elements of x.\n or x can be another polynomial and the composite polynomial p(x) will be\n returned.\n \"\"\"\n p = asarray(p)\n if isinstance(x,poly1d):\n y = 0\n else:\n x = asarray(x)\n y = _nx.zeros(x.shape,x.typecode())\n for i in range(len(p)):\n y = x * y + p[i]\n return y\n\ndef polyadd(a1,a2):\n \"\"\"Adds two polynomials represented as lists\n \"\"\"\n truepoly = (isinstance(a1,poly1d) or isinstance(a2,poly1d))\n a1,a2 = map(atleast_1d,(a1,a2))\n diff = len(a2) - len(a1)\n if diff == 0:\n return a1 + a2\n elif diff > 0:\n zr = _nx.zeros(diff)\n val = _nx.concatenate((zr,a1)) + a2\n else:\n zr = _nx.zeros(abs(diff))\n val = a1 + _nx.concatenate((zr,a2))\n if truepoly:\n val = poly1d(val)\n return val\n\ndef polysub(a1,a2):\n \"\"\"Subtracts two polynomials represented as lists\n \"\"\"\n truepoly = (isinstance(a1,poly1d) or isinstance(a2,poly1d))\n a1,a2 = map(atleast_1d,(a1,a2))\n diff = len(a2) - len(a1)\n if diff == 0:\n return a1 - a2\n elif diff > 0:\n zr = _nx.zeros(diff)\n val = _nx.concatenate((zr,a1)) - a2\n else:\n zr = _nx.zeros(abs(diff))\n val = a1 - _nx.concatenate((zr,a2))\n if truepoly:\n val = poly1d(val)\n return val\n\n\ndef polymul(a1,a2):\n \"\"\"Multiplies two polynomials represented as lists.\n \"\"\"\n truepoly = (isinstance(a1,poly1d) or isinstance(a2,poly1d))\n val = _nx.convolve(a1,a2)\n if truepoly:\n val = poly1d(val)\n return val\n\ndef polydiv(a1,a2):\n \"\"\"Computes q and r polynomials so that a1(s) = 
q(s)*a2(s) + r(s)\n \"\"\"\n truepoly = (isinstance(a1,poly1d) or isinstance(a2,poly1d))\n q, r = deconvolve(a1,a2)\n while _nx.allclose(r[0], 0, rtol=1e-14) and (r.shape[-1] > 1):\n r = r[1:]\n if truepoly:\n q, r = map(poly1d,(q,r))\n return q, r\n\ndef deconvolve(signal, divisor):\n \"\"\"Deconvolves divisor out of signal.\n \"\"\"\n try:\n import scipy.signal\n except:\n print \"You need scipy.signal to use this function.\"\n num = atleast_1d(signal)\n den = atleast_1d(divisor)\n N = len(num)\n D = len(den)\n if D > N:\n quot = [];\n rem = num;\n else:\n input = _nx.ones(N-D+1,_nx.Float)\n input[1:] = 0\n quot = scipy.signal.lfilter(num, den, input)\n rem = num - _nx.convolve(den,quot,mode=2)\n return quot, rem\n\nimport re\n_poly_mat = re.compile(r\"[*][*]([0-9]*)\")\ndef _raise_power(astr, wrap=70):\n n = 0\n line1 = ''\n line2 = ''\n output = ' '\n while 1:\n mat = _poly_mat.search(astr,n)\n if mat is None:\n break\n span = mat.span()\n power = mat.groups()[0]\n partstr = astr[n:span[0]]\n n = span[1]\n toadd2 = partstr + ' '*(len(power)-1)\n toadd1 = ' '*(len(partstr)-1) + power\n if ((len(line2)+len(toadd2) > wrap) or \\\n (len(line1)+len(toadd1) > wrap)):\n output += line1 + \"\\n\" + line2 + \"\\n \"\n line1 = toadd1\n line2 = toadd2\n else: \n line2 += partstr + ' '*(len(power)-1)\n line1 += ' '*(len(partstr)-1) + power\n output += line1 + \"\\n\" + line2\n return output + astr[n:]\n \n \nclass poly1d:\n \"\"\"A one-dimensional polynomial class.\n\n p = poly1d([1,2,3]) constructs the polynomial x**2 + 2 x + 3\n\n p(0.5) evaluates the polynomial at the location\n p.r is a list of roots\n p.c is the coefficient array [1,2,3]\n p.order is the polynomial order (after leading zeros in p.c are removed)\n p[k] is the coefficient on the kth power of x (backwards from\n sequencing the coefficient array.\n\n polynomials can be added, substracted, multplied and divided (returns\n quotient and remainder).\n asarray(p) will also give the coefficient array, so 
polynomials can\n be used in all functions that accept arrays.\n \"\"\"\n def __init__(self, c_or_r, r=0):\n if isinstance(c_or_r,poly1d):\n for key in c_or_r.__dict__.keys():\n self.__dict__[key] = c_or_r.__dict__[key]\n return\n if r:\n c_or_r = poly(c_or_r)\n c_or_r = atleast_1d(c_or_r)\n if len(c_or_r.shape) > 1:\n raise ValueError, \"Polynomial must be 1d only.\"\n c_or_r = trim_zeros(c_or_r, trim='f')\n if len(c_or_r) == 0:\n c_or_r = _nx.array([0])\n self.__dict__['coeffs'] = c_or_r\n self.__dict__['order'] = len(c_or_r) - 1\n\n def __array__(self,t=None):\n if t:\n return asarray(self.coeffs,t)\n else:\n return asarray(self.coeffs)\n\n def __coerce__(self,other):\n return None\n \n def __repr__(self):\n vals = repr(self.coeffs)\n vals = vals[6:-1]\n return \"poly1d(%s)\" % vals\n\n def __len__(self):\n return self.order\n\n def __str__(self):\n N = self.order\n thestr = \"0\"\n for k in range(len(self.coeffs)):\n coefstr ='%.4g' % abs(self.coeffs[k])\n if coefstr[-4:] == '0000':\n coefstr = coefstr[:-5]\n power = (N-k)\n if power == 0:\n if coefstr != '0':\n newstr = '%s' % (coefstr,)\n else:\n if k == 0:\n newstr = '0'\n else:\n newstr = ''\n elif power == 1:\n if coefstr == '0':\n newstr = ''\n elif coefstr == '1':\n newstr = 'x'\n else: \n newstr = '%s x' % (coefstr,)\n else:\n if coefstr == '0':\n newstr = ''\n elif coefstr == '1':\n newstr = 'x**%d' % (power,)\n else: \n newstr = '%s x**%d' % (coefstr, power)\n\n if k > 0:\n if newstr != '':\n if self.coeffs[k] < 0:\n thestr = \"%s - %s\" % (thestr, newstr)\n else:\n thestr = \"%s + %s\" % (thestr, newstr)\n elif (k == 0) and (newstr != '') and (self.coeffs[k] < 0):\n thestr = \"-%s\" % (newstr,)\n else:\n thestr = newstr\n return _raise_power(thestr)\n \n\n def __call__(self, val):\n return polyval(self.coeffs, val)\n\n def __mul__(self, other):\n if isscalar(other):\n return poly1d(self.coeffs * other)\n else:\n other = poly1d(other)\n return poly1d(polymul(self.coeffs, other.coeffs))\n\n def 
__rmul__(self, other):\n if isscalar(other):\n return poly1d(other * self.coeffs)\n else:\n other = poly1d(other)\n return poly1d(polymul(self.coeffs, other.coeffs)) \n \n def __add__(self, other):\n other = poly1d(other)\n return poly1d(polyadd(self.coeffs, other.coeffs)) \n \n def __radd__(self, other):\n other = poly1d(other)\n return poly1d(polyadd(self.coeffs, other.coeffs))\n\n def __pow__(self, val):\n if not isscalar(val) or int(val) != val or val < 0:\n raise ValueError, \"Power to non-negative integers only.\"\n res = [1]\n for k in range(val):\n res = polymul(self.coeffs, res)\n return poly1d(res)\n\n def __sub__(self, other):\n other = poly1d(other)\n return poly1d(polysub(self.coeffs, other.coeffs))\n\n def __rsub__(self, other):\n other = poly1d(other)\n return poly1d(polysub(other.coeffs, self.coeffs))\n\n def __div__(self, other):\n if isscalar(other):\n return poly1d(self.coeffs/other)\n else:\n other = poly1d(other)\n return map(poly1d,polydiv(self.coeffs, other.coeffs))\n\n def __rdiv__(self, other):\n if isscalar(other):\n return poly1d(other/self.coeffs)\n else:\n other = poly1d(other)\n return map(poly1d,polydiv(other.coeffs, self.coeffs))\n\n def __setattr__(self, key, val):\n raise ValueError, \"Attributes cannot be changed this way.\"\n\n def __getattr__(self, key):\n if key in ['r','roots']:\n return roots(self.coeffs)\n elif key in ['c','coef','coefficients']:\n return self.coeffs\n elif key in ['o']:\n return self.order\n else:\n return self.__dict__[key]\n \n def __getitem__(self, val):\n ind = self.order - val\n if val > self.order:\n return 0\n if val < 0:\n return 0\n return self.coeffs[ind]\n\n def __setitem__(self, key, val):\n ind = self.order - key\n if key < 0:\n raise ValueError, \"Does not support negative powers.\"\n if key > self.order:\n zr = _nx.zeros(key-self.order,self.coeffs.typecode())\n self.__dict__['coeffs'] = _nx.concatenate((zr,self.coeffs))\n self.__dict__['order'] = key\n ind = 0\n self.__dict__['coeffs'][ind] = 
val\n return\n\n def integ(self, m=1, k=0):\n return poly1d(polyint(self.coeffs,m=m,k=k))\n\n def deriv(self, m=1):\n return poly1d(polyder(self.coeffs,m=m))\n", "source_code_before": "import numerix as _nx\nfrom numerix import *\nfrom scimath import *\n\nfrom type_check import isscalar, asarray\nfrom matrix_base import diag\nfrom shape_base import hstack, atleast_1d\nfrom function_base import trim_zeros, sort_complex\n\n__all__ = ['poly','roots','polyint','polyder','polyadd','polysub','polymul',\n 'polydiv','polyval','poly1d']\n \ndef get_eigval_func():\n try:\n import scipy.linalg\n eigvals = scipy.linalg.eigvals\n except ImportError:\n try:\n import linalg\n eigvals = linalg.eigvals\n except ImportError:\n try:\n import LinearAlgebra\n eigvals = LinearAlgebra.eigenvalues\n except:\n raise ImportError, \\\n \"You must have scipy.linalg or LinearAlgebra to \"\\\n \"use this function.\"\n return eigvals\n\ndef poly(seq_of_zeros):\n \"\"\" Return a sequence representing a polynomial given a sequence of roots.\n\n If the input is a matrix, return the characteristic polynomial.\n \n Example:\n \n >>> b = roots([1,3,1,5,6])\n >>> poly(b)\n array([1., 3., 1., 5., 6.])\n \"\"\"\n seq_of_zeros = atleast_1d(seq_of_zeros) \n sh = shape(seq_of_zeros)\n if len(sh) == 2 and sh[0] == sh[1]:\n eig = get_eigval_func()\n seq_of_zeros=eig(seq_of_zeros)\n elif len(sh) ==1:\n pass\n else:\n raise ValueError, \"input must be 1d or square 2d array.\"\n\n if len(seq_of_zeros) == 0:\n return 1.0\n\n a = [1]\n for k in range(len(seq_of_zeros)):\n a = convolve(a,[1, -seq_of_zeros[k]], mode=2)\n \n if a.typecode() in ['F','D']:\n # if complex roots are all complex conjugates, the roots are real.\n roots = asarray(seq_of_zeros,'D')\n pos_roots = sort_complex(compress(roots.imag > 0,roots))\n neg_roots = conjugate(sort_complex(compress(roots.imag < 0,roots)))\n if (len(pos_roots) == len(neg_roots) and\n alltrue(neg_roots == pos_roots)):\n a = a.real.copy()\n\n return a\n\ndef roots(p):\n 
\"\"\" Return the roots of the polynomial coefficients in p.\n\n The values in the rank-1 array p are coefficients of a polynomial.\n If the length of p is n+1 then the polynomial is\n p[0] * x**n + p[1] * x**(n-1) + ... + p[n-1]*x + p[n]\n \"\"\"\n # If input is scalar, this makes it an array\n eig = get_eigval_func()\n p = atleast_1d(p)\n if len(p.shape) != 1:\n raise ValueError,\"Input must be a rank-1 array.\"\n \n # find non-zero array entries\n non_zero = nonzero(ravel(p))\n\n # find the number of trailing zeros -- this is the number of roots at 0.\n trailing_zeros = len(p) - non_zero[-1] - 1\n\n # strip leading and trailing zeros\n p = p[int(non_zero[0]):int(non_zero[-1])+1]\n \n # casting: if incoming array isn't floating point, make it floating point.\n if p.typecode() not in ['f','d','F','D']:\n p = p.astype('d')\n\n N = len(p)\n if N > 1:\n # build companion matrix and find its eigenvalues (the roots)\n A = diag(ones((N-2,),p.typecode()),-1)\n A[0,:] = -p[1:] / p[0]\n roots = eig(A)\n else:\n return array([])\n\n # tack any zeros onto the back of the array \n roots = hstack((roots,zeros(trailing_zeros,roots.typecode())))\n return roots\n\ndef polyint(p,m=1,k=None):\n \"\"\"Return the mth analytical integral of the polynomial p.\n\n If k is None, then zero-valued constants of integration are used.\n otherwise, k should be a list of length m (or a scalar if m=1) to\n represent the constants of integration to use for each integration\n (starting with k[0])\n \"\"\"\n m = int(m)\n if m < 0:\n raise ValueError, \"Order of integral must be positive (see polyder)\"\n if k is None:\n k = _nx.zeros(m)\n k = atleast_1d(k)\n if len(k) == 1 and m > 1:\n k = k[0]*_nx.ones(m)\n if len(k) < m:\n raise ValueError, \\\n \"k must be a scalar or a rank-1 array of length 1 or >m.\"\n if m == 0:\n return p\n else:\n truepoly = isinstance(p,poly1d)\n p = asarray(p)\n y = _nx.zeros(len(p)+1,'d')\n y[:-1] = p*1.0/_nx.arange(len(p),0,-1)\n y[-1] = k[0] \n val = 
polyint(y,m-1,k=k[1:])\n if truepoly:\n val = poly1d(val)\n return val\n \ndef polyder(p,m=1):\n \"\"\"Return the mth derivative of the polynomial p.\n \"\"\"\n m = int(m)\n truepoly = isinstance(p,poly1d)\n p = asarray(p)\n n = len(p)-1\n y = p[:-1] * _nx.arange(n,0,-1)\n if m < 0:\n raise ValueError, \"Order of derivative must be positive (see polyint)\"\n if m == 0:\n return p\n else:\n val = polyder(y,m-1)\n if truepoly:\n val = poly1d(val)\n return val\n\ndef polyval(p,x):\n \"\"\"Evaluate the polynomial p at x. If x is a polynomial then composition.\n\n Description:\n\n If p is of length N, this function returns the value:\n p[0]*(x**N-1) + p[1]*(x**N-2) + ... + p[N-2]*x + p[N-1]\n\n x can be a sequence and p(x) will be returned for all elements of x.\n or x can be another polynomial and the composite polynomial p(x) will be\n returned.\n \"\"\"\n p = asarray(p)\n if isinstance(x,poly1d):\n y = 0\n else:\n x = asarray(x)\n y = _nx.zeros(x.shape,x.typecode())\n for i in range(len(p)):\n y = x * y + p[i]\n return y\n\ndef polyadd(a1,a2):\n \"\"\"Adds two polynomials represented as lists\n \"\"\"\n truepoly = (isinstance(a1,poly1d) or isinstance(a2,poly1d))\n a1,a2 = map(atleast_1d,(a1,a2))\n diff = len(a2) - len(a1)\n if diff == 0:\n return a1 + a2\n elif diff > 0:\n zr = _nx.zeros(diff)\n val = _nx.concatenate((zr,a1)) + a2\n else:\n zr = _nx.zeros(abs(diff))\n val = a1 + _nx.concatenate((zr,a2))\n if truepoly:\n val = poly1d(val)\n return val\n\ndef polysub(a1,a2):\n \"\"\"Subtracts two polynomials represented as lists\n \"\"\"\n truepoly = (isinstance(a1,poly1d) or isinstance(a2,poly1d))\n a1,a2 = map(atleast_1d,(a1,a2))\n diff = len(a2) - len(a1)\n if diff == 0:\n return a1 - a2\n elif diff > 0:\n zr = _nx.zeros(diff)\n val = _nx.concatenate((zr,a1)) - a2\n else:\n zr = _nx.zeros(abs(diff))\n val = a1 - _nx.concatenate((zr,a2))\n if truepoly:\n val = poly1d(val)\n return val\n\n\ndef polymul(a1,a2):\n \"\"\"Multiplies two polynomials represented as lists.\n 
\"\"\"\n truepoly = (isinstance(a1,poly1d) or isinstance(a2,poly1d))\n val = _nx.convolve(a1,a2)\n if truepoly:\n val = poly1d(val)\n return val\n\ndef polydiv(a1,a2):\n \"\"\"Computes q and r polynomials so that a1(s) = q(s)*a2(s) + r(s)\n \"\"\"\n truepoly = (isinstance(a1,poly1d) or isinstance(a2,poly1d))\n q, r = deconvolve(a1,a2)\n while _nx.allclose(r[0], 0, rtol=1e-14) and (r.shape[-1] > 1):\n r = r[1:]\n if truepoly:\n q, r = map(poly1d,(q,r))\n return q, r\n\ndef deconvolve(signal, divisor):\n \"\"\"Deconvolves divisor out of signal.\n \"\"\"\n try:\n import scipy.signal\n except:\n print \"You need scipy.signal to use this function.\"\n num = atleast_1d(signal)\n den = atleast_1d(divisor)\n N = len(num)\n D = len(den)\n if D > N:\n quot = [];\n rem = num;\n else:\n input = _nx.ones(N-D+1,_nx.Float)\n input[1:] = 0\n quot = scipy.signal.lfilter(num, den, input)\n rem = num - _nx.convolve(den,quot,mode=2)\n return quot, rem\n\nimport re\n_poly_mat = re.compile(r\"[*][*]([0-9]*)\")\ndef _raise_power(astr, wrap=70):\n n = 0\n line1 = ''\n line2 = ''\n output = ' '\n while 1:\n mat = _poly_mat.search(astr,n)\n if mat is None:\n break\n span = mat.span()\n power = mat.groups()[0]\n partstr = astr[n:span[0]]\n n = span[1]\n toadd2 = partstr + ' '*(len(power)-1)\n toadd1 = ' '*(len(partstr)-1) + power\n if ((len(line2)+len(toadd2) > wrap) or \\\n (len(line1)+len(toadd1) > wrap)):\n output += line1 + \"\\n\" + line2 + \"\\n \"\n line1 = toadd1\n line2 = toadd2\n else: \n line2 += partstr + ' '*(len(power)-1)\n line1 += ' '*(len(partstr)-1) + power\n output += line1 + \"\\n\" + line2\n return output + astr[n:]\n \n \nclass poly1d:\n \"\"\"A one-dimensional polynomial class.\n\n p = poly1d([1,2,3]) constructs the polynomial x**2 + 2 x + 3\n\n p(0.5) evaluates the polynomial at the location\n p.r is a list of roots\n p.c is the coefficient array [1,2,3]\n p.order is the polynomial order (after leading zeros in p.c are removed)\n p[k] is the coefficient on the kth 
power of x (backwards from\n sequencing the coefficient array.\n\n polynomials can be added, substracted, multplied and divided (returns\n quotient and remainder).\n asarray(p) will also give the coefficient array, so polynomials can\n be used in all functions that accept arrays.\n \"\"\"\n def __init__(self, c_or_r, r=0):\n if isinstance(c_or_r,poly1d):\n for key in c_or_r.__dict__.keys():\n self.__dict__[key] = c_or_r.__dict__[key]\n return\n if r:\n c_or_r = poly(c_or_r)\n c_or_r = atleast_1d(c_or_r)\n if len(c_or_r.shape) > 1:\n raise ValueError, \"Polynomial must be 1d only.\"\n c_or_r = trim_zeros(c_or_r, trim='f')\n if len(c_or_r) == 0:\n c_or_r = _nx.array([0])\n self.__dict__['coeffs'] = c_or_r\n self.__dict__['order'] = len(c_or_r) - 1\n\n def __array__(self,t=None):\n if t:\n return asarray(self.coeffs,t)\n else:\n return asarray(self.coeffs)\n\n def __coerce__(self,other):\n return None\n \n def __repr__(self):\n vals = repr(self.coeffs)\n vals = vals[6:-1]\n return \"poly1d(%s)\" % vals\n\n def __len__(self):\n return self.order\n\n def __str__(self):\n N = self.order\n thestr = \"0\"\n for k in range(len(self.coeffs)):\n coefstr ='%.4g' % abs(self.coeffs[k])\n if coefstr[-4:] == '0000':\n coefstr = coefstr[:-5]\n power = (N-k)\n if power == 0:\n if coefstr != '0':\n newstr = '%s' % (coefstr,)\n else:\n if k == 0:\n newstr = '0'\n else:\n newstr = ''\n elif power == 1:\n if coefstr == '0':\n newstr = ''\n elif coefstr == '1':\n newstr = 'x'\n else: \n newstr = '%s x' % (coefstr,)\n else:\n if coefstr == '0':\n newstr = ''\n elif coefstr == '1':\n newstr = 'x**%d' % (power,)\n else: \n newstr = '%s x**%d' % (coefstr, power)\n\n if k > 0:\n if newstr != '':\n if self.coeffs[k] < 0:\n thestr = \"%s - %s\" % (thestr, newstr)\n else:\n thestr = \"%s + %s\" % (thestr, newstr)\n elif (k == 0) and (newstr != '') and (self.coeffs[k] < 0):\n thestr = \"-%s\" % (newstr,)\n else:\n thestr = newstr\n return _raise_power(thestr)\n \n\n def __call__(self, val):\n 
return polyval(self.coeffs, val)\n\n def __mul__(self, other):\n if isscalar(other):\n return poly1d(self.coeffs * other)\n else:\n other = poly1d(other)\n return poly1d(polymul(self.coeffs, other.coeffs))\n\n def __rmul__(self, other):\n if isscalar(other):\n return poly1d(other * self.coeffs)\n else:\n other = poly1d(other)\n return poly1d(polymul(self.coeffs, other.coeffs)) \n \n def __add__(self, other):\n other = poly1d(other)\n return poly1d(polyadd(self.coeffs, other.coeffs)) \n \n def __radd__(self, other):\n other = poly1d(other)\n return poly1d(polyadd(self.coeffs, other.coeffs))\n\n def __pow__(self, val):\n if not isscalar(val) or int(val) != val or val < 0:\n raise ValueError, \"Power to non-negative integers only.\"\n res = [1]\n for k in range(val):\n res = polymul(self.coeffs, res)\n return poly1d(res)\n\n def __sub__(self, other):\n other = poly1d(other)\n return poly1d(polysub(self.coeffs, other.coeffs))\n\n def __rsub__(self, other):\n other = poly1d(other)\n return poly1d(polysub(other.coeffs, self.coeffs))\n\n def __div__(self, other):\n if isscalar(other):\n return poly1d(self.coeffs/other)\n else:\n other = poly1d(other)\n return map(poly1d,polydiv(self.coeffs, other.coeffs))\n\n def __rdiv__(self, other):\n if isscalar(other):\n return poly1d(other/self.coeffs)\n else:\n other = poly1d(other)\n return map(poly1d,polydiv(other.coeffs, self.coeffs))\n\n def __setattr__(self, key, val):\n raise ValueError, \"Attributes cannot be changed this way.\"\n\n def __getattr__(self, key):\n if key in ['r','roots']:\n return roots(self.coeffs)\n elif key in ['c','coef','coefficients']:\n return self.coeffs\n elif key in ['o']:\n return self.order\n else:\n return self.__dict__[key]\n \n def __getitem__(self, val):\n ind = self.order - val\n if val > self.order:\n return 0\n if val < 0:\n return 0\n return self.coeffs[ind]\n\n def __setitem__(self, key, val):\n ind = self.order - key\n if key < 0:\n raise ValueError, \"Does not support negative 
powers.\"\n if key > self.order:\n zr = _nx.zeros(key-self.order,self.coeffs.typecode())\n self.__dict__['coeffs'] = _nx.concatenate((zr,self.coeffs))\n self.__dict__['order'] = key\n ind = 0\n self.__dict__['coeffs'][ind] = val\n return\n\n def integ(self, m=1, k=0):\n return poly1d(polyint(self.coeffs,m=m,k=k))\n\n def deriv(self, m=1):\n return poly1d(polyder(self.coeffs,m=m))\n", "methods": [ { "name": "get_eigval_func", "long_name": "get_eigval_func( )", "filename": "polynomial.py", "nloc": 11, "complexity": 3, "token_count": 40, "parameters": [], "start_line": 13, "end_line": 23, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 }, { "name": "poly", "long_name": "poly( seq_of_zeros )", "filename": "polynomial.py", "nloc": 23, "complexity": 9, "token_count": 195, "parameters": [ "seq_of_zeros" ], "start_line": 25, "end_line": 62, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 38, "top_nesting_level": 0 }, { "name": "roots", "long_name": "roots( p )", "filename": "polynomial.py", "nloc": 19, "complexity": 4, "token_count": 190, "parameters": [ "p" ], "start_line": 64, "end_line": 101, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 38, "top_nesting_level": 0 }, { "name": "polyint", "long_name": "polyint( p , m = 1 , k = None )", "filename": "polynomial.py", "nloc": 24, "complexity": 8, "token_count": 190, "parameters": [ "p", "m", "k" ], "start_line": 103, "end_line": 133, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 31, "top_nesting_level": 0 }, { "name": "polyder", "long_name": "polyder( p , m = 1 )", "filename": "polynomial.py", "nloc": 15, "complexity": 4, "token_count": 97, "parameters": [ "p", "m" ], "start_line": 135, "end_line": 151, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "polyval", "long_name": "polyval( p , x )", "filename": "polynomial.py", "nloc": 10, "complexity": 3, "token_count": 72, "parameters": [ "p", "x" ], 
"start_line": 153, "end_line": 173, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 0 }, { "name": "polyadd", "long_name": "polyadd( a1 , a2 )", "filename": "polynomial.py", "nloc": 15, "complexity": 5, "token_count": 124, "parameters": [ "a1", "a2" ], "start_line": 175, "end_line": 191, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "polysub", "long_name": "polysub( a1 , a2 )", "filename": "polynomial.py", "nloc": 15, "complexity": 5, "token_count": 124, "parameters": [ "a1", "a2" ], "start_line": 193, "end_line": 209, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "polymul", "long_name": "polymul( a1 , a2 )", "filename": "polynomial.py", "nloc": 6, "complexity": 3, "token_count": 46, "parameters": [ "a1", "a2" ], "start_line": 212, "end_line": 219, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "polydiv", "long_name": "polydiv( a1 , a2 )", "filename": "polynomial.py", "nloc": 8, "complexity": 5, "token_count": 94, "parameters": [ "a1", "a2" ], "start_line": 221, "end_line": 230, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "deconvolve", "long_name": "deconvolve( signal , divisor )", "filename": "polynomial.py", "nloc": 18, "complexity": 3, "token_count": 115, "parameters": [ "signal", "divisor" ], "start_line": 232, "end_line": 251, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "_raise_power", "long_name": "_raise_power( astr , wrap = 70 )", "filename": "polynomial.py", "nloc": 25, "complexity": 5, "token_count": 194, "parameters": [ "astr", "wrap" ], "start_line": 255, "end_line": 279, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, "top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , c_or_r , r = 0 )", "filename": 
"polynomial.py", "nloc": 15, "complexity": 6, "token_count": 122, "parameters": [ "self", "c_or_r", "r" ], "start_line": 299, "end_line": 313, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 15, "top_nesting_level": 1 }, { "name": "__array__", "long_name": "__array__( self , t = None )", "filename": "polynomial.py", "nloc": 5, "complexity": 2, "token_count": 30, "parameters": [ "self", "t" ], "start_line": 315, "end_line": 319, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "__coerce__", "long_name": "__coerce__( self , other )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self", "other" ], "start_line": 321, "end_line": 322, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__repr__", "long_name": "__repr__( self )", "filename": "polynomial.py", "nloc": 4, "complexity": 1, "token_count": 26, "parameters": [ "self" ], "start_line": 324, "end_line": 327, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "__len__", "long_name": "__len__( self )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 329, "end_line": 330, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__str__", "long_name": "__str__( self )", "filename": "polynomial.py", "nloc": 41, "complexity": 17, "token_count": 244, "parameters": [ "self" ], "start_line": 332, "end_line": 373, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 42, "top_nesting_level": 1 }, { "name": "__call__", "long_name": "__call__( self , val )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 16, "parameters": [ "self", "val" ], "start_line": 376, "end_line": 377, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__mul__", 
"long_name": "__mul__( self , other )", "filename": "polynomial.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "self", "other" ], "start_line": 379, "end_line": 384, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "__rmul__", "long_name": "__rmul__( self , other )", "filename": "polynomial.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "self", "other" ], "start_line": 386, "end_line": 391, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "__add__", "long_name": "__add__( self , other )", "filename": "polynomial.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "self", "other" ], "start_line": 393, "end_line": 395, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "__radd__", "long_name": "__radd__( self , other )", "filename": "polynomial.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "self", "other" ], "start_line": 397, "end_line": 399, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "__pow__", "long_name": "__pow__( self , val )", "filename": "polynomial.py", "nloc": 7, "complexity": 5, "token_count": 57, "parameters": [ "self", "val" ], "start_line": 401, "end_line": 407, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "__sub__", "long_name": "__sub__( self , other )", "filename": "polynomial.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "self", "other" ], "start_line": 409, "end_line": 411, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "__rsub__", "long_name": "__rsub__( self , other )", "filename": "polynomial.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "self", "other" ], "start_line": 413, "end_line": 415, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "__div__", "long_name": "__div__( self , other )", "filename": "polynomial.py", "nloc": 6, "complexity": 2, "token_count": 46, "parameters": [ "self", "other" ], "start_line": 417, "end_line": 422, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "__rdiv__", "long_name": "__rdiv__( self , other )", "filename": "polynomial.py", "nloc": 6, "complexity": 2, "token_count": 46, "parameters": [ "self", "other" ], "start_line": 424, "end_line": 429, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "__setattr__", "long_name": "__setattr__( self , key , val )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self", "key", "val" ], "start_line": 431, "end_line": 432, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__getattr__", "long_name": "__getattr__( self , key )", "filename": "polynomial.py", "nloc": 9, "complexity": 4, "token_count": 58, "parameters": [ "self", "key" ], "start_line": 434, "end_line": 442, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "__getitem__", "long_name": "__getitem__( self , val )", "filename": "polynomial.py", "nloc": 7, "complexity": 3, "token_count": 37, "parameters": [ "self", "val" ], "start_line": 444, "end_line": 450, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "__setitem__", "long_name": "__setitem__( self , key , val )", "filename": "polynomial.py", "nloc": 11, "complexity": 3, "token_count": 94, "parameters": [ "self", "key", "val" ], "start_line": 452, "end_line": 462, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "integ", "long_name": "integ( self , m = 1 , k = 0 )", "filename": "polynomial.py", "nloc": 2, 
"complexity": 1, "token_count": 31, "parameters": [ "self", "m", "k" ], "start_line": 464, "end_line": 465, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "deriv", "long_name": "deriv( self , m = 1 )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "self", "m" ], "start_line": 467, "end_line": 468, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 } ], "methods_before": [ { "name": "get_eigval_func", "long_name": "get_eigval_func( )", "filename": "polynomial.py", "nloc": 17, "complexity": 4, "token_count": 52, "parameters": [], "start_line": 13, "end_line": 29, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "poly", "long_name": "poly( seq_of_zeros )", "filename": "polynomial.py", "nloc": 23, "complexity": 9, "token_count": 195, "parameters": [ "seq_of_zeros" ], "start_line": 31, "end_line": 68, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 38, "top_nesting_level": 0 }, { "name": "roots", "long_name": "roots( p )", "filename": "polynomial.py", "nloc": 19, "complexity": 4, "token_count": 190, "parameters": [ "p" ], "start_line": 70, "end_line": 107, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 38, "top_nesting_level": 0 }, { "name": "polyint", "long_name": "polyint( p , m = 1 , k = None )", "filename": "polynomial.py", "nloc": 24, "complexity": 8, "token_count": 190, "parameters": [ "p", "m", "k" ], "start_line": 109, "end_line": 139, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 31, "top_nesting_level": 0 }, { "name": "polyder", "long_name": "polyder( p , m = 1 )", "filename": "polynomial.py", "nloc": 15, "complexity": 4, "token_count": 97, "parameters": [ "p", "m" ], "start_line": 141, "end_line": 157, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "polyval", "long_name": "polyval( p , x )", 
"filename": "polynomial.py", "nloc": 10, "complexity": 3, "token_count": 72, "parameters": [ "p", "x" ], "start_line": 159, "end_line": 179, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 21, "top_nesting_level": 0 }, { "name": "polyadd", "long_name": "polyadd( a1 , a2 )", "filename": "polynomial.py", "nloc": 15, "complexity": 5, "token_count": 124, "parameters": [ "a1", "a2" ], "start_line": 181, "end_line": 197, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "polysub", "long_name": "polysub( a1 , a2 )", "filename": "polynomial.py", "nloc": 15, "complexity": 5, "token_count": 124, "parameters": [ "a1", "a2" ], "start_line": 199, "end_line": 215, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 17, "top_nesting_level": 0 }, { "name": "polymul", "long_name": "polymul( a1 , a2 )", "filename": "polynomial.py", "nloc": 6, "complexity": 3, "token_count": 46, "parameters": [ "a1", "a2" ], "start_line": 218, "end_line": 225, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "polydiv", "long_name": "polydiv( a1 , a2 )", "filename": "polynomial.py", "nloc": 8, "complexity": 5, "token_count": 94, "parameters": [ "a1", "a2" ], "start_line": 227, "end_line": 236, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 10, "top_nesting_level": 0 }, { "name": "deconvolve", "long_name": "deconvolve( signal , divisor )", "filename": "polynomial.py", "nloc": 18, "complexity": 3, "token_count": 115, "parameters": [ "signal", "divisor" ], "start_line": 238, "end_line": 257, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "_raise_power", "long_name": "_raise_power( astr , wrap = 70 )", "filename": "polynomial.py", "nloc": 25, "complexity": 5, "token_count": 194, "parameters": [ "astr", "wrap" ], "start_line": 261, "end_line": 285, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 25, 
"top_nesting_level": 0 }, { "name": "__init__", "long_name": "__init__( self , c_or_r , r = 0 )", "filename": "polynomial.py", "nloc": 15, "complexity": 6, "token_count": 122, "parameters": [ "self", "c_or_r", "r" ], "start_line": 305, "end_line": 319, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 15, "top_nesting_level": 1 }, { "name": "__array__", "long_name": "__array__( self , t = None )", "filename": "polynomial.py", "nloc": 5, "complexity": 2, "token_count": 30, "parameters": [ "self", "t" ], "start_line": 321, "end_line": 325, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 1 }, { "name": "__coerce__", "long_name": "__coerce__( self , other )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self", "other" ], "start_line": 327, "end_line": 328, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__repr__", "long_name": "__repr__( self )", "filename": "polynomial.py", "nloc": 4, "complexity": 1, "token_count": 26, "parameters": [ "self" ], "start_line": 330, "end_line": 333, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 1 }, { "name": "__len__", "long_name": "__len__( self )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 9, "parameters": [ "self" ], "start_line": 335, "end_line": 336, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__str__", "long_name": "__str__( self )", "filename": "polynomial.py", "nloc": 41, "complexity": 17, "token_count": 244, "parameters": [ "self" ], "start_line": 338, "end_line": 379, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 42, "top_nesting_level": 1 }, { "name": "__call__", "long_name": "__call__( self , val )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 16, "parameters": [ "self", "val" ], "start_line": 382, "end_line": 383, "fan_in": 
0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__mul__", "long_name": "__mul__( self , other )", "filename": "polynomial.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "self", "other" ], "start_line": 385, "end_line": 390, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "__rmul__", "long_name": "__rmul__( self , other )", "filename": "polynomial.py", "nloc": 6, "complexity": 2, "token_count": 44, "parameters": [ "self", "other" ], "start_line": 392, "end_line": 397, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "__add__", "long_name": "__add__( self , other )", "filename": "polynomial.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "self", "other" ], "start_line": 399, "end_line": 401, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "__radd__", "long_name": "__radd__( self , other )", "filename": "polynomial.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "self", "other" ], "start_line": 403, "end_line": 405, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "__pow__", "long_name": "__pow__( self , val )", "filename": "polynomial.py", "nloc": 7, "complexity": 5, "token_count": 57, "parameters": [ "self", "val" ], "start_line": 407, "end_line": 413, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "__sub__", "long_name": "__sub__( self , other )", "filename": "polynomial.py", "nloc": 3, "complexity": 1, "token_count": 27, "parameters": [ "self", "other" ], "start_line": 415, "end_line": 417, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "__rsub__", "long_name": "__rsub__( self , other )", "filename": "polynomial.py", "nloc": 3, "complexity": 1, "token_count": 27, 
"parameters": [ "self", "other" ], "start_line": 419, "end_line": 421, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 1 }, { "name": "__div__", "long_name": "__div__( self , other )", "filename": "polynomial.py", "nloc": 6, "complexity": 2, "token_count": 46, "parameters": [ "self", "other" ], "start_line": 423, "end_line": 428, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "__rdiv__", "long_name": "__rdiv__( self , other )", "filename": "polynomial.py", "nloc": 6, "complexity": 2, "token_count": 46, "parameters": [ "self", "other" ], "start_line": 430, "end_line": 435, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 1 }, { "name": "__setattr__", "long_name": "__setattr__( self , key , val )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 13, "parameters": [ "self", "key", "val" ], "start_line": 437, "end_line": 438, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "__getattr__", "long_name": "__getattr__( self , key )", "filename": "polynomial.py", "nloc": 9, "complexity": 4, "token_count": 58, "parameters": [ "self", "key" ], "start_line": 440, "end_line": 448, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 1 }, { "name": "__getitem__", "long_name": "__getitem__( self , val )", "filename": "polynomial.py", "nloc": 7, "complexity": 3, "token_count": 37, "parameters": [ "self", "val" ], "start_line": 450, "end_line": 456, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 7, "top_nesting_level": 1 }, { "name": "__setitem__", "long_name": "__setitem__( self , key , val )", "filename": "polynomial.py", "nloc": 11, "complexity": 3, "token_count": 94, "parameters": [ "self", "key", "val" ], "start_line": 458, "end_line": 468, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 1 }, { "name": "integ", 
"long_name": "integ( self , m = 1 , k = 0 )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 31, "parameters": [ "self", "m", "k" ], "start_line": 470, "end_line": 471, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 }, { "name": "deriv", "long_name": "deriv( self , m = 1 )", "filename": "polynomial.py", "nloc": 2, "complexity": 1, "token_count": 23, "parameters": [ "self", "m" ], "start_line": 473, "end_line": 474, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 2, "top_nesting_level": 1 } ], "changed_methods": [ { "name": "get_eigval_func", "long_name": "get_eigval_func( )", "filename": "polynomial.py", "nloc": 11, "complexity": 3, "token_count": 40, "parameters": [], "start_line": 13, "end_line": 23, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 11, "top_nesting_level": 0 } ], "nloc": 364, "complexity": 116, "token_count": 2644, "diff_parsed": { "added": [ " from numerix import eigenvalues as eigvals" ], "deleted": [ " try:", " import LinearAlgebra", " eigvals = LinearAlgebra.eigenvalues", " except:", " raise ImportError, \\", " \"You must have scipy.linalg or LinearAlgebra to \"\\", " \"use this function.\"" ] } }, { "old_path": "scipy_base/scimath.py", "new_path": "scipy_base/scimath.py", "filename": "scimath.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -10,7 +10,6 @@\n from numerix import *\n \n from type_check import isreal, asarray\n-from function_base import any\n \n __all__.extend([key for key in dir(_nx.fastumath) \\\n if key[0]!='_' and key not in __all__])\n@@ -25,7 +24,7 @@ def _fix_real_lt_zero(x):\n x = asarray(x)\n if any(isreal(x) & (x<0)):\n x = _tocomplex(x)\n- return x\n+ return asscalar(x)\n \n def _fix_real_abs_gt_1(x):\n x = asarray(x)\n", "added_lines": 1, "deleted_lines": 2, "source_code": "\"\"\"\nWrapper functions to more user-friendly calling of certain math functions\nwhose output is different than the input in certain domains of the 
input.\n\"\"\"\n\n__all__ = ['sqrt', 'log', 'log2','logn','log10', 'power', 'arccos',\n 'arcsin', 'arctanh']\n\nimport numerix as _nx\nfrom numerix import *\n\nfrom type_check import isreal, asarray\n\n__all__.extend([key for key in dir(_nx.fastumath) \\\n if key[0]!='_' and key not in __all__])\n\ndef _tocomplex(arr):\n if arr.typecode() in ['f', 's', 'b', '1','w']:\n return arr.astype('F')\n else:\n return arr.astype('D')\n\ndef _fix_real_lt_zero(x):\n x = asarray(x)\n if any(isreal(x) & (x<0)):\n x = _tocomplex(x)\n return asscalar(x)\n\ndef _fix_real_abs_gt_1(x):\n x = asarray(x)\n if any(isreal(x) & (abs(x)>1)):\n x = _tocomplex(x)\n return x\n \ndef sqrt(x):\n x = _fix_real_lt_zero(x)\n return fastumath.sqrt(x)\n\ndef log(x):\n x = _fix_real_lt_zero(x)\n return fastumath.log(x)\n\ndef log10(x):\n x = _fix_real_lt_zero(x)\n return fastumath.log10(x) \n\ndef logn(n,x):\n \"\"\" Take log base n of x.\n \"\"\"\n x = _fix_real_lt_zero(x)\n n = _fix_real_lt_zero(n)\n return fastumath.log(x)/fastumath.log(n)\n\ndef log2(x):\n \"\"\" Take log base 2 of x.\n \"\"\"\n x = _fix_real_lt_zero(x)\n return fastumath.log(x)/fastumath.log(2)\n\ndef power(x, p):\n x = _fix_real_lt_zero(x)\n return fastumath.power(x, p)\n\n\ndef arccos(x):\n x = _fix_real_abs_gt_1(x)\n return fastumath.arccos(x)\n\ndef arcsin(x):\n x = _fix_real_abs_gt_1(x)\n return fastumath.arcsin(x)\n\ndef arctanh(x):\n x = _fix_real_abs_gt_1(x)\n return fastumath.arctanh(x)\n", "source_code_before": "\"\"\"\nWrapper functions to more user-friendly calling of certain math functions\nwhose output is different than the input in certain domains of the input.\n\"\"\"\n\n__all__ = ['sqrt', 'log', 'log2','logn','log10', 'power', 'arccos',\n 'arcsin', 'arctanh']\n\nimport numerix as _nx\nfrom numerix import *\n\nfrom type_check import isreal, asarray\nfrom function_base import any\n\n__all__.extend([key for key in dir(_nx.fastumath) \\\n if key[0]!='_' and key not in __all__])\n\ndef _tocomplex(arr):\n if 
arr.typecode() in ['f', 's', 'b', '1','w']:\n return arr.astype('F')\n else:\n return arr.astype('D')\n\ndef _fix_real_lt_zero(x):\n x = asarray(x)\n if any(isreal(x) & (x<0)):\n x = _tocomplex(x)\n return x\n\ndef _fix_real_abs_gt_1(x):\n x = asarray(x)\n if any(isreal(x) & (abs(x)>1)):\n x = _tocomplex(x)\n return x\n \ndef sqrt(x):\n x = _fix_real_lt_zero(x)\n return fastumath.sqrt(x)\n\ndef log(x):\n x = _fix_real_lt_zero(x)\n return fastumath.log(x)\n\ndef log10(x):\n x = _fix_real_lt_zero(x)\n return fastumath.log10(x) \n\ndef logn(n,x):\n \"\"\" Take log base n of x.\n \"\"\"\n x = _fix_real_lt_zero(x)\n n = _fix_real_lt_zero(n)\n return fastumath.log(x)/fastumath.log(n)\n\ndef log2(x):\n \"\"\" Take log base 2 of x.\n \"\"\"\n x = _fix_real_lt_zero(x)\n return fastumath.log(x)/fastumath.log(2)\n\ndef power(x, p):\n x = _fix_real_lt_zero(x)\n return fastumath.power(x, p)\n\n\ndef arccos(x):\n x = _fix_real_abs_gt_1(x)\n return fastumath.arccos(x)\n\ndef arcsin(x):\n x = _fix_real_abs_gt_1(x)\n return fastumath.arcsin(x)\n\ndef arctanh(x):\n x = _fix_real_abs_gt_1(x)\n return fastumath.arctanh(x)\n", "methods": [ { "name": "_tocomplex", "long_name": "_tocomplex( arr )", "filename": "scimath.py", "nloc": 5, "complexity": 2, "token_count": 40, "parameters": [ "arr" ], "start_line": 17, "end_line": 21, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "_fix_real_lt_zero", "long_name": "_fix_real_lt_zero( x )", "filename": "scimath.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "x" ], "start_line": 23, "end_line": 27, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "_fix_real_abs_gt_1", "long_name": "_fix_real_abs_gt_1( x )", "filename": "scimath.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "x" ], "start_line": 29, "end_line": 33, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { 
"name": "sqrt", "long_name": "sqrt( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 35, "end_line": 37, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "log", "long_name": "log( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 39, "end_line": 41, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "log10", "long_name": "log10( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 43, "end_line": 45, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "logn", "long_name": "logn( n , x )", "filename": "scimath.py", "nloc": 4, "complexity": 1, "token_count": 34, "parameters": [ "n", "x" ], "start_line": 47, "end_line": 52, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "log2", "long_name": "log2( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 26, "parameters": [ "x" ], "start_line": 54, "end_line": 58, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "power", "long_name": "power( x , p )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 22, "parameters": [ "x", "p" ], "start_line": 60, "end_line": 62, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "arccos", "long_name": "arccos( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 65, "end_line": 67, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "arcsin", "long_name": "arcsin( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], 
"start_line": 69, "end_line": 71, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "arctanh", "long_name": "arctanh( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 73, "end_line": 75, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 } ], "methods_before": [ { "name": "_tocomplex", "long_name": "_tocomplex( arr )", "filename": "scimath.py", "nloc": 5, "complexity": 2, "token_count": 40, "parameters": [ "arr" ], "start_line": 18, "end_line": 22, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "_fix_real_lt_zero", "long_name": "_fix_real_lt_zero( x )", "filename": "scimath.py", "nloc": 5, "complexity": 2, "token_count": 34, "parameters": [ "x" ], "start_line": 24, "end_line": 28, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "_fix_real_abs_gt_1", "long_name": "_fix_real_abs_gt_1( x )", "filename": "scimath.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "x" ], "start_line": 30, "end_line": 34, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "sqrt", "long_name": "sqrt( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 36, "end_line": 38, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "log", "long_name": "log( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 40, "end_line": 42, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "log10", "long_name": "log10( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 44, "end_line": 46, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "logn", "long_name": "logn( n , x )", "filename": "scimath.py", "nloc": 4, "complexity": 1, "token_count": 34, "parameters": [ "n", "x" ], "start_line": 48, "end_line": 53, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 6, "top_nesting_level": 0 }, { "name": "log2", "long_name": "log2( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 26, "parameters": [ "x" ], "start_line": 55, "end_line": 59, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 }, { "name": "power", "long_name": "power( x , p )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 22, "parameters": [ "x", "p" ], "start_line": 61, "end_line": 63, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "arccos", "long_name": "arccos( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 66, "end_line": 68, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "arcsin", "long_name": "arcsin( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 70, "end_line": 72, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 }, { "name": "arctanh", "long_name": "arctanh( x )", "filename": "scimath.py", "nloc": 3, "complexity": 1, "token_count": 18, "parameters": [ "x" ], "start_line": 74, "end_line": 76, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 3, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "_fix_real_lt_zero", "long_name": "_fix_real_lt_zero( x )", "filename": "scimath.py", "nloc": 5, "complexity": 2, "token_count": 37, "parameters": [ "x" ], "start_line": 23, "end_line": 27, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 5, "top_nesting_level": 0 } ], "nloc": 54, 
"complexity": 15, "token_count": 382, "diff_parsed": { "added": [ " return asscalar(x)" ], "deleted": [ "from function_base import any", " return x" ] } } ] }, { "hash": "76bfd2cef62471cfe6cafd633f0e973cb5901f58", "msg": "Fixed bmat", "author": { "name": "Travis Oliphant", "email": "oliphant@enthought.com" }, "committer": { "name": "Travis Oliphant", "email": "oliphant@enthought.com" }, "author_date": "2005-05-12T22:40:33+00:00", "author_timezone": 0, "committer_date": "2005-05-12T22:40:33+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "195ab4a92b1d42a6926db3f9095ef774998270cc" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 4, "insertions": 4, "lines": 8, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "scipy_base/matrix_base.py", "new_path": "scipy_base/matrix_base.py", "filename": "matrix_base.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -121,17 +121,17 @@ def bmat(obj,gdict=None,ldict=None):\n glob_dict = gdict\n loc_dict = ldict\n \n- return Matrix.Matrix(_from_string(obj, glob_dict, loc_dict))\n+ return Matrix(_from_string(obj, glob_dict, loc_dict))\n \n if isinstance(obj, (types.TupleType, types.ListType)):\n # [[A,B],[C,D]]\n arr_rows = []\n for row in obj:\n if isinstance(row, ArrayType): # not 2-d\n- return Matrix.Matrix(concatenate(obj,axis=-1))\n+ return Matrix(concatenate(obj,axis=-1))\n else:\n arr_rows.append(concatenate(row,axis=-1))\n- return Matrix.Matrix(concatenate(arr_rows,axis=0))\n+ return Matrix(concatenate(arr_rows,axis=0))\n if isinstance(obj, ArrayType):\n- return Matrix.Matrix(obj)\n+ return Matrix(obj)\n \n", "added_lines": 4, "deleted_lines": 4, "source_code": "\"\"\" Basic functions for manipulating 2d arrays\n\n\"\"\"\n\n__all__ = ['diag','eye','fliplr','flipud','rot90','bmat','matrix']\n\nfrom numerix import *\nfrom type_check import 
asarray\nimport sys\n\nmatrix = Matrix\n\ndef fliplr(m):\n \"\"\" returns a 2-D matrix m with the rows preserved and columns flipped \n in the left/right direction. Only works with 2-D arrays.\n \"\"\"\n m = asarray(m)\n if len(m.shape) != 2:\n raise ValueError, \"Input must be 2-D.\"\n return m[:, ::-1]\n\ndef flipud(m):\n \"\"\" returns a 2-D matrix with the columns preserved and rows flipped in\n the up/down direction. Only works with 2-D arrays.\n \"\"\"\n m = asarray(m)\n if len(m.shape) != 2:\n raise ValueError, \"Input must be 2-D.\"\n return m[::-1]\n \n# reshape(x, m, n) is not used, instead use reshape(x, (m, n))\n\ndef rot90(m, k=1):\n \"\"\" returns the matrix found by rotating m by k*90 degrees in the \n counterclockwise direction.\n \"\"\"\n m = asarray(m)\n if len(m.shape) != 2:\n raise ValueError, \"Input must be 2-D.\"\n k = k % 4\n if k == 0: return m\n elif k == 1: return transpose(fliplr(m))\n elif k == 2: return fliplr(flipud(m))\n else: return fliplr(transpose(m)) # k==3\n \ndef eye(N, M=None, k=0, typecode='d'):\n \"\"\" eye returns a N-by-M matrix where the k-th diagonal is all ones, \n and everything else is zeros.\n \"\"\"\n if M is None: M = N\n if type(M) == type('d'): \n typecode = M\n M = N\n m = equal(subtract.outer(arange(N), arange(M)),-k)\n if typecode is None:\n return m\n else:\n return m.astype(typecode)\n\ndef diag(v, k=0):\n \"\"\" returns the k-th diagonal if v is a matrix or returns a matrix \n with v as the k-th diagonal if v is a vector.\n \"\"\"\n v = asarray(v)\n s = v.shape\n if len(s)==1:\n n = s[0]+abs(k)\n if k > 0:\n v = concatenate((zeros(k, v.typecode()),v))\n elif k < 0:\n v = concatenate((v,zeros(-k, v.typecode())))\n return eye(n, k=k)*v\n elif len(s)==2:\n v = add.reduce(eye(s[0], s[1], k=k)*v)\n if k > 0: return v[k:]\n elif k < 0: return v[:k]\n else: return v\n else:\n raise ValueError, \"Input must be 1- or 2-D.\"\n\n\ndef _from_string(str,gdict,ldict):\n rows = str.split(';')\n rowtup = []\n for row in 
rows:\n trow = row.split(',')\n coltup = []\n for col in trow:\n col = col.strip()\n try:\n thismat = gdict[col]\n except KeyError:\n try:\n thismat = ldict[col]\n except KeyError:\n raise KeyError, \"%s not found\" % (col,)\n \n coltup.append(thismat)\n rowtup.append(concatenate(coltup,axis=-1))\n return concatenate(rowtup,axis=0)\n\ndef bmat(obj,gdict=None,ldict=None):\n \"\"\"Build a matrix object from string, nested sequence, or array.\n\n Ex: F = bmat('A, B; C, D') \n F = bmat([[A,B],[C,D]])\n F = bmat(r_[c_[A,B],c_[C,D]])\n\n all produce the same Matrix Object [ A B ]\n [ C D ]\n \n if A, B, C, and D are appropriately shaped 2-d arrays.\n \"\"\"\n if isinstance(obj, types.StringType):\n if gdict is None:\n # get previous frame\n frame = sys._getframe().f_back\n glob_dict = frame.f_globals\n loc_dict = frame.f_locals\n else:\n glob_dict = gdict\n loc_dict = ldict\n \n return Matrix(_from_string(obj, glob_dict, loc_dict))\n \n if isinstance(obj, (types.TupleType, types.ListType)):\n # [[A,B],[C,D]]\n arr_rows = []\n for row in obj:\n if isinstance(row, ArrayType): # not 2-d\n return Matrix(concatenate(obj,axis=-1))\n else:\n arr_rows.append(concatenate(row,axis=-1))\n return Matrix(concatenate(arr_rows,axis=0))\n if isinstance(obj, ArrayType):\n return Matrix(obj)\n\n", "source_code_before": "\"\"\" Basic functions for manipulating 2d arrays\n\n\"\"\"\n\n__all__ = ['diag','eye','fliplr','flipud','rot90','bmat','matrix']\n\nfrom numerix import *\nfrom type_check import asarray\nimport sys\n\nmatrix = Matrix\n\ndef fliplr(m):\n \"\"\" returns a 2-D matrix m with the rows preserved and columns flipped \n in the left/right direction. Only works with 2-D arrays.\n \"\"\"\n m = asarray(m)\n if len(m.shape) != 2:\n raise ValueError, \"Input must be 2-D.\"\n return m[:, ::-1]\n\ndef flipud(m):\n \"\"\" returns a 2-D matrix with the columns preserved and rows flipped in\n the up/down direction. 
Only works with 2-D arrays.\n \"\"\"\n m = asarray(m)\n if len(m.shape) != 2:\n raise ValueError, \"Input must be 2-D.\"\n return m[::-1]\n \n# reshape(x, m, n) is not used, instead use reshape(x, (m, n))\n\ndef rot90(m, k=1):\n \"\"\" returns the matrix found by rotating m by k*90 degrees in the \n counterclockwise direction.\n \"\"\"\n m = asarray(m)\n if len(m.shape) != 2:\n raise ValueError, \"Input must be 2-D.\"\n k = k % 4\n if k == 0: return m\n elif k == 1: return transpose(fliplr(m))\n elif k == 2: return fliplr(flipud(m))\n else: return fliplr(transpose(m)) # k==3\n \ndef eye(N, M=None, k=0, typecode='d'):\n \"\"\" eye returns a N-by-M matrix where the k-th diagonal is all ones, \n and everything else is zeros.\n \"\"\"\n if M is None: M = N\n if type(M) == type('d'): \n typecode = M\n M = N\n m = equal(subtract.outer(arange(N), arange(M)),-k)\n if typecode is None:\n return m\n else:\n return m.astype(typecode)\n\ndef diag(v, k=0):\n \"\"\" returns the k-th diagonal if v is a matrix or returns a matrix \n with v as the k-th diagonal if v is a vector.\n \"\"\"\n v = asarray(v)\n s = v.shape\n if len(s)==1:\n n = s[0]+abs(k)\n if k > 0:\n v = concatenate((zeros(k, v.typecode()),v))\n elif k < 0:\n v = concatenate((v,zeros(-k, v.typecode())))\n return eye(n, k=k)*v\n elif len(s)==2:\n v = add.reduce(eye(s[0], s[1], k=k)*v)\n if k > 0: return v[k:]\n elif k < 0: return v[:k]\n else: return v\n else:\n raise ValueError, \"Input must be 1- or 2-D.\"\n\n\ndef _from_string(str,gdict,ldict):\n rows = str.split(';')\n rowtup = []\n for row in rows:\n trow = row.split(',')\n coltup = []\n for col in trow:\n col = col.strip()\n try:\n thismat = gdict[col]\n except KeyError:\n try:\n thismat = ldict[col]\n except KeyError:\n raise KeyError, \"%s not found\" % (col,)\n \n coltup.append(thismat)\n rowtup.append(concatenate(coltup,axis=-1))\n return concatenate(rowtup,axis=0)\n\ndef bmat(obj,gdict=None,ldict=None):\n \"\"\"Build a matrix object from string, nested 
sequence, or array.\n\n Ex: F = bmat('A, B; C, D') \n F = bmat([[A,B],[C,D]])\n F = bmat(r_[c_[A,B],c_[C,D]])\n\n all produce the same Matrix Object [ A B ]\n [ C D ]\n \n if A, B, C, and D are appropriately shaped 2-d arrays.\n \"\"\"\n if isinstance(obj, types.StringType):\n if gdict is None:\n # get previous frame\n frame = sys._getframe().f_back\n glob_dict = frame.f_globals\n loc_dict = frame.f_locals\n else:\n glob_dict = gdict\n loc_dict = ldict\n \n return Matrix.Matrix(_from_string(obj, glob_dict, loc_dict))\n \n if isinstance(obj, (types.TupleType, types.ListType)):\n # [[A,B],[C,D]]\n arr_rows = []\n for row in obj:\n if isinstance(row, ArrayType): # not 2-d\n return Matrix.Matrix(concatenate(obj,axis=-1))\n else:\n arr_rows.append(concatenate(row,axis=-1))\n return Matrix.Matrix(concatenate(arr_rows,axis=0))\n if isinstance(obj, ArrayType):\n return Matrix.Matrix(obj)\n\n", "methods": [ { "name": "fliplr", "long_name": "fliplr( m )", "filename": "matrix_base.py", "nloc": 5, "complexity": 2, "token_count": 35, "parameters": [ "m" ], "start_line": 13, "end_line": 20, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "flipud", "long_name": "flipud( m )", "filename": "matrix_base.py", "nloc": 5, "complexity": 2, "token_count": 33, "parameters": [ "m" ], "start_line": 22, "end_line": 29, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "rot90", "long_name": "rot90( m , k = 1 )", "filename": "matrix_base.py", "nloc": 9, "complexity": 5, "token_count": 78, "parameters": [ "m", "k" ], "start_line": 33, "end_line": 44, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 }, { "name": "eye", "long_name": "eye( N , M = None , k = 0 , typecode = 'd' )", "filename": "matrix_base.py", "nloc": 10, "complexity": 4, "token_count": 81, "parameters": [ "N", "M", "k", "typecode" ], "start_line": 46, "end_line": 58, "fan_in": 0, "fan_out": 0, 
"general_fan_out": 0, "length": 13, "top_nesting_level": 0 }, { "name": "diag", "long_name": "diag( v , k = 0 )", "filename": "matrix_base.py", "nloc": 17, "complexity": 7, "token_count": 165, "parameters": [ "v", "k" ], "start_line": 60, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "_from_string", "long_name": "_from_string( str , gdict , ldict )", "filename": "matrix_base.py", "nloc": 18, "complexity": 5, "token_count": 110, "parameters": [ "str", "gdict", "ldict" ], "start_line": 82, "end_line": 100, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 19, "top_nesting_level": 0 }, { "name": "bmat", "long_name": "bmat( obj , gdict = None , ldict = None )", "filename": "matrix_base.py", "nloc": 20, "complexity": 7, "token_count": 155, "parameters": [ "obj", "gdict", "ldict" ], "start_line": 102, "end_line": 136, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 0 } ], "methods_before": [ { "name": "fliplr", "long_name": "fliplr( m )", "filename": "matrix_base.py", "nloc": 5, "complexity": 2, "token_count": 35, "parameters": [ "m" ], "start_line": 13, "end_line": 20, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "flipud", "long_name": "flipud( m )", "filename": "matrix_base.py", "nloc": 5, "complexity": 2, "token_count": 33, "parameters": [ "m" ], "start_line": 22, "end_line": 29, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 8, "top_nesting_level": 0 }, { "name": "rot90", "long_name": "rot90( m , k = 1 )", "filename": "matrix_base.py", "nloc": 9, "complexity": 5, "token_count": 78, "parameters": [ "m", "k" ], "start_line": 33, "end_line": 44, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 12, "top_nesting_level": 0 }, { "name": "eye", "long_name": "eye( N , M = None , k = 0 , typecode = 'd' )", "filename": "matrix_base.py", "nloc": 10, "complexity": 4, "token_count": 81, 
"parameters": [ "N", "M", "k", "typecode" ], "start_line": 46, "end_line": 58, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 13, "top_nesting_level": 0 }, { "name": "diag", "long_name": "diag( v , k = 0 )", "filename": "matrix_base.py", "nloc": 17, "complexity": 7, "token_count": 165, "parameters": [ "v", "k" ], "start_line": 60, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 20, "top_nesting_level": 0 }, { "name": "_from_string", "long_name": "_from_string( str , gdict , ldict )", "filename": "matrix_base.py", "nloc": 18, "complexity": 5, "token_count": 110, "parameters": [ "str", "gdict", "ldict" ], "start_line": 82, "end_line": 100, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 19, "top_nesting_level": 0 }, { "name": "bmat", "long_name": "bmat( obj , gdict = None , ldict = None )", "filename": "matrix_base.py", "nloc": 20, "complexity": 7, "token_count": 163, "parameters": [ "obj", "gdict", "ldict" ], "start_line": 102, "end_line": 136, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 0 } ], "changed_methods": [ { "name": "bmat", "long_name": "bmat( obj , gdict = None , ldict = None )", "filename": "matrix_base.py", "nloc": 20, "complexity": 7, "token_count": 155, "parameters": [ "obj", "gdict", "ldict" ], "start_line": 102, "end_line": 136, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 35, "top_nesting_level": 0 } ], "nloc": 92, "complexity": 32, "token_count": 695, "diff_parsed": { "added": [ " return Matrix(_from_string(obj, glob_dict, loc_dict))", " return Matrix(concatenate(obj,axis=-1))", " return Matrix(concatenate(arr_rows,axis=0))", " return Matrix(obj)" ], "deleted": [ " return Matrix.Matrix(_from_string(obj, glob_dict, loc_dict))", " return Matrix.Matrix(concatenate(obj,axis=-1))", " return Matrix.Matrix(concatenate(arr_rows,axis=0))", " return Matrix.Matrix(obj)" ] } } ] }, { "hash": "ab64ab765965f7ef3d7b0e76ae0f1cd76ee3173d", "msg": "Fixed the 
auto-environment setting for NUMARRAY/NUMERIC from NUMERIX.", "author": { "name": "jmiller", "email": "jmiller@localhost" }, "committer": { "name": "jmiller", "email": "jmiller@localhost" }, "author_date": "2005-05-31T14:06:10+00:00", "author_timezone": 0, "committer_date": "2005-05-31T14:06:10+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "76bfd2cef62471cfe6cafd633f0e973cb5901f58" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 3, "insertions": 3, "lines": 6, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "scipy_base/numerix.py", "new_path": "scipy_base/numerix.py", "filename": "numerix.py", "extension": "py", "change_type": "MODIFY", "diff": "@@ -52,6 +52,9 @@\n verbose.report_error(__doc__)\n raise ValueError(\"numerix selector must be either 'Numeric' or 'numarray' but the value obtained from the %s was '%s'.\" % (which[1], which[0]))\n \n+# Tweak the environment for f2py/scipy_distutils, e.g. setenv NUMERIC 1\n+os.environ[ which[0].upper() ] = \"1\" \n+\n if which[0] == \"numarray\":\n from _na_imports import *\n import numarray\n@@ -61,9 +64,6 @@\n else:\n raise RuntimeError(\"invalid numerix selector\")\n \n-# Tweak the environment for f2py/scipy_distutils, e.g. setenv NUMERIC 1\n-os.environ[ which[0].upper() ] = \"1\" \n-\n print 'numerix %s'% NX_VERSION\n \n # ---------------------------------------------------------------\n", "added_lines": 3, "deleted_lines": 3, "source_code": "\"\"\"numerix imports either Numeric or numarray based on various selectors.\n\n0. If the value \"--numarray\" or \"--Numeric\" is specified on the\ncommand line, then numerix imports the specified array package.\n\n1. If the environment variable NUMERIX exists, it's value is used to\nchoose Numeric or numarray.\n\n2. The value of numerix in ~/.matplotlibrc: either Numeric or numarray\n\n\n3. 
If none of the above is done, the default array package is Numeric.\nBecause the .matplotlibrc always provides *some* value for numerix (it\nhas it's own system of default values), this default is most likely\nnever used.\n\nTo summarize: the commandline is examined first, the rc file second,\nand the default array package is Numeric. \n\"\"\"\n\nimport sys, os\n# from matplotlib import rcParams, verbose\n\nwhich = None, None\n\n# First, see if --numarray or --Numeric was specified on the command\n# line:\nif hasattr(sys, 'argv'): #Once again, Apache mod_python has no argv\n for a in sys.argv:\n if a in [\"--Numeric\", \"--numeric\", \"--NUMERIC\",\n \"--Numarray\", \"--numarray\", \"--NUMARRAY\"]:\n which = a[2:], \"command line\"\n sys.argv.remove(a)\n break\n del a\n\nif os.getenv(\"NUMERIX\"):\n which = os.getenv(\"NUMERIX\"), \"environment var\"\n\n# if which[0] is None: \n# try: # In theory, rcParams always has *some* value for numerix.\n# which = rcParams['numerix'], \"rc\"\n# except KeyError:\n# pass\n\n# If all the above fail, default to Numeric.\nif which[0] is None:\n which = \"numeric\", \"defaulted\"\n\nwhich = which[0].strip().lower(), which[1]\nif which[0] not in [\"numeric\", \"numarray\"]:\n verbose.report_error(__doc__)\n raise ValueError(\"numerix selector must be either 'Numeric' or 'numarray' but the value obtained from the %s was '%s'.\" % (which[1], which[0]))\n\n# Tweak the environment for f2py/scipy_distutils, e.g. 
setenv NUMERIC 1\nos.environ[ which[0].upper() ] = \"1\" \n\nif which[0] == \"numarray\":\n from _na_imports import *\n import numarray\nelif which[0] == \"numeric\":\n from _nc_imports import *\n import Numeric\nelse:\n raise RuntimeError(\"invalid numerix selector\")\n\nprint 'numerix %s'% NX_VERSION\n\n# ---------------------------------------------------------------\n# Common imports and fixes\n# ---------------------------------------------------------------\n\n# a bug fix for blas numeric suggested by Fernando Perez\nmatrixmultiply=dot\n\ndef any(x):\n \"\"\"Return true if any elements of x are true: sometrue(ravel(x))\n \"\"\"\n return sometrue(ravel(x))\n\n\ndef all(x):\n \"\"\"Return true if all elements of x are true: alltrue(ravel(x))\n \"\"\"\n return alltrue(ravel(x))\n\ndef _import_fail_message(module, version):\n \"\"\"Prints a message when the array package specific version of an extension\n fails to import correctly.\n \"\"\"\n _dict = { \"which\" : which[0],\n \"module\" : module,\n \"specific\" : version + module\n }\n print \"\"\"\\nThe import of the %(which)s version of the %(module)s module, %(specific)s, failed.\\nThis is either because %(which)s was unavailable when scipy was compiled,\\nor because a dependency of %(specific)s could not be satisfied.\\nIf it appears that %(specific)s was not built, make sure you have a working copy of\\n%(which)s and then re-install scipy. Otherwise, the following traceback gives more details:\\n\"\"\" % _dict\n", "source_code_before": "\"\"\"numerix imports either Numeric or numarray based on various selectors.\n\n0. If the value \"--numarray\" or \"--Numeric\" is specified on the\ncommand line, then numerix imports the specified array package.\n\n1. If the environment variable NUMERIX exists, it's value is used to\nchoose Numeric or numarray.\n\n2. The value of numerix in ~/.matplotlibrc: either Numeric or numarray\n\n\n3. 
If none of the above is done, the default array package is Numeric.\nBecause the .matplotlibrc always provides *some* value for numerix (it\nhas it's own system of default values), this default is most likely\nnever used.\n\nTo summarize: the commandline is examined first, the rc file second,\nand the default array package is Numeric. \n\"\"\"\n\nimport sys, os\n# from matplotlib import rcParams, verbose\n\nwhich = None, None\n\n# First, see if --numarray or --Numeric was specified on the command\n# line:\nif hasattr(sys, 'argv'): #Once again, Apache mod_python has no argv\n for a in sys.argv:\n if a in [\"--Numeric\", \"--numeric\", \"--NUMERIC\",\n \"--Numarray\", \"--numarray\", \"--NUMARRAY\"]:\n which = a[2:], \"command line\"\n sys.argv.remove(a)\n break\n del a\n\nif os.getenv(\"NUMERIX\"):\n which = os.getenv(\"NUMERIX\"), \"environment var\"\n\n# if which[0] is None: \n# try: # In theory, rcParams always has *some* value for numerix.\n# which = rcParams['numerix'], \"rc\"\n# except KeyError:\n# pass\n\n# If all the above fail, default to Numeric.\nif which[0] is None:\n which = \"numeric\", \"defaulted\"\n\nwhich = which[0].strip().lower(), which[1]\nif which[0] not in [\"numeric\", \"numarray\"]:\n verbose.report_error(__doc__)\n raise ValueError(\"numerix selector must be either 'Numeric' or 'numarray' but the value obtained from the %s was '%s'.\" % (which[1], which[0]))\n\nif which[0] == \"numarray\":\n from _na_imports import *\n import numarray\nelif which[0] == \"numeric\":\n from _nc_imports import *\n import Numeric\nelse:\n raise RuntimeError(\"invalid numerix selector\")\n\n# Tweak the environment for f2py/scipy_distutils, e.g. 
setenv NUMERIC 1\nos.environ[ which[0].upper() ] = \"1\" \n\nprint 'numerix %s'% NX_VERSION\n\n# ---------------------------------------------------------------\n# Common imports and fixes\n# ---------------------------------------------------------------\n\n# a bug fix for blas numeric suggested by Fernando Perez\nmatrixmultiply=dot\n\ndef any(x):\n \"\"\"Return true if any elements of x are true: sometrue(ravel(x))\n \"\"\"\n return sometrue(ravel(x))\n\n\ndef all(x):\n \"\"\"Return true if all elements of x are true: alltrue(ravel(x))\n \"\"\"\n return alltrue(ravel(x))\n\ndef _import_fail_message(module, version):\n \"\"\"Prints a message when the array package specific version of an extension\n fails to import correctly.\n \"\"\"\n _dict = { \"which\" : which[0],\n \"module\" : module,\n \"specific\" : version + module\n }\n print \"\"\"\\nThe import of the %(which)s version of the %(module)s module, %(specific)s, failed.\\nThis is either because %(which)s was unavailable when scipy was compiled,\\nor because a dependency of %(specific)s could not be satisfied.\\nIf it appears that %(specific)s was not built, make sure you have a working copy of\\n%(which)s and then re-install scipy. 
Otherwise, the following traceback gives more details:\\n\"\"\" % _dict\n", "methods": [ { "name": "any", "long_name": "any( x )", "filename": "numerix.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 76, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "all", "long_name": "all( x )", "filename": "numerix.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 82, "end_line": 85, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "_import_fail_message", "long_name": "_import_fail_message( module , version )", "filename": "numerix.py", "nloc": 6, "complexity": 1, "token_count": 32, "parameters": [ "module", "version" ], "start_line": 87, "end_line": 95, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 0 } ], "methods_before": [ { "name": "any", "long_name": "any( x )", "filename": "numerix.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 76, "end_line": 79, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "all", "long_name": "all( x )", "filename": "numerix.py", "nloc": 2, "complexity": 1, "token_count": 14, "parameters": [ "x" ], "start_line": 82, "end_line": 85, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 4, "top_nesting_level": 0 }, { "name": "_import_fail_message", "long_name": "_import_fail_message( module , version )", "filename": "numerix.py", "nloc": 6, "complexity": 1, "token_count": 32, "parameters": [ "module", "version" ], "start_line": 87, "end_line": 95, "fan_in": 0, "fan_out": 0, "general_fan_out": 0, "length": 9, "top_nesting_level": 0 } ], "changed_methods": [], "nloc": 58, "complexity": 3, "token_count": 268, "diff_parsed": { "added": [ "# Tweak the environment for f2py/scipy_distutils, e.g. 
setenv NUMERIC 1", "os.environ[ which[0].upper() ] = \"1\"", "" ], "deleted": [ "# Tweak the environment for f2py/scipy_distutils, e.g. setenv NUMERIC 1", "os.environ[ which[0].upper() ] = \"1\"", "" ] } } ] }, { "hash": "02b1927f359e141e7d0038d526acb27c2d2af049", "msg": "Fixed nesting of for Mozilla on linux (the remainder of the file\nwas in italics).", "author": { "name": "jmiller", "email": "jmiller@localhost" }, "committer": { "name": "jmiller", "email": "jmiller@localhost" }, "author_date": "2005-05-31T20:29:57+00:00", "author_timezone": 0, "committer_date": "2005-05-31T20:29:57+00:00", "committer_timezone": 0, "branches": [ "main" ], "in_main_branch": true, "merge": false, "parents": [ "ab64ab765965f7ef3d7b0e76ae0f1cd76ee3173d" ], "project_name": "repo_copy", "project_path": "/tmp/tmpm0q74n18/repo_copy", "deletions": 3, "insertions": 3, "lines": 6, "files": 1, "dmm_unit_size": null, "dmm_unit_complexity": null, "dmm_unit_interfacing": null, "modified_files": [ { "old_path": "weave/doc/tutorial.html", "new_path": "weave/doc/tutorial.html", "filename": "tutorial.html", "extension": "html", "change_type": "MODIFY", "diff": "@@ -331,10 +331,10 @@

Testing

\n whether a display exists on a machine? \n

\n \n-\n-\n

\n \n+\n+\n \n \n

Benchmarks

\n@@ -2897,4 +2897,4 @@

Things I wish weave did

\n due to the fact that stdio.h also defines the name. weave\n should probably try and handle this in some way.\n \n-Other things...\n\\ No newline at end of file\n+Other things...\n", "added_lines": 3, "deleted_lines": 3, "source_code": "\n

Weave Documentation

\n

\nBy Eric Jones eric@enthought.com\n

\n

Outline

\n
\n
Introduction\n
Requirements\n
Installation\n
Testing\n
Benchmarks\n
Inline\n
\n
More with printf\n
\n More examples\n
\n
Binary search\n
Dictionary sort\n
Numeric -- cast/copy/transpose\n
wxPython
\n
\n
Keyword options\n
Returning values\n
\n
\n The issue with locals()
\n
\n
A quick look at the code\n
\n Technical Details\n
\n
Converting Types\n
\n
\n Numeric Argument Conversion\n
\n String, List, Tuple, and Dictionary Conversion\n
File Conversion \n
\n Callable, Instance, and Module Conversion \n
Customizing Conversions\n
\n
Compiling Code\n
\"Cataloging\" functions\n
\n
Function Storage\n
The PYTHONCOMPILED evnironment variable
\n
\n
\n
\n
\n
\n
Blitz\n
\n
Requirements\n
Limitations\n
Numeric Efficiency Issues\n
The Tools \n
\n
Parser\n
Blitz and Numeric\n
\n
Type defintions and coersion\n
Cataloging Compiled Functions\n
Checking Array Sizes\n
Creating the Extension Module\n
\n
Extension Modules\n
\n
A Simple Example\n
Fibonacci Example\n
\n
Customizing Type Conversions -- Type Factories (not written)\n
\n
Type Specifications\n
Type Information\n
The Conversion Process \n
\n
\n\n

Introduction

\n\n

\nThe weave package provides tools for including C/C++ code within\nin Python code. This offers both another level of optimization to those who need \nit, and an easy way to modify and extend any supported extension libraries such \nas wxPython and hopefully VTK soon. Inlining C/C++ code within Python generally\nresults in speed ups of 1.5x to 30x speed-up over algorithms written in pure\nPython (However, it is also possible to slow things down...). Generally \nalgorithms that require a large number of calls to the Python API don't benefit\nas much from the conversion to C/C++ as algorithms that have inner loops \ncompletely convertable to C.\n

\nThere are three basic ways to use weave. The \nweave.inline() function executes C code directly within Python, \nand weave.blitz() translates Python Numeric expressions to C++ \nfor fast execution. blitz() was the original reason \nweave was built. For those interested in building extension\nlibraries, the ext_tools module provides classes for building \nextension modules within Python. \n

\nMost of weave's functionality should work on Windows and Unix, \nalthough some of its functionality requires gcc or a similarly \nmodern C++ compiler that handles templates well. Up to now, most testing has \nbeen done on Windows 2000 with Microsoft's C++ compiler (MSVC) and with gcc \n(mingw32 2.95.2 and 2.95.3-6). All tests also pass on Linux (RH 7.1 \nwith gcc 2.96), and I've had reports that it works on Debian also (thanks \nPearu).\n

\nThe inline and blitz provide new functionality to \nPython (although I've recently learned about the PyInline project which may offer \nsimilar functionality to inline). On the other hand, tools for \nbuilding Python extension modules already exists (SWIG, SIP, pycpp, CXX, and \nothers). As of yet, I'm not sure where weave fits in this \nspectrum. It is closest in flavor to CXX in that it makes creating new C/C++ \nextension modules pretty easy. However, if you're wrapping a gaggle of legacy \nfunctions or classes, SWIG and friends are definitely the better choice. \nweave is set up so that you can customize how Python types are \nconverted to C types in weave. This is great for \ninline(), but, for wrapping legacy code, it is more flexible to \nspecify things the other way around -- that is how C types map to Python types. \nThis weave does not do. I guess it would be possible to build \nsuch a tool on top of weave, but with good tools like SWIG around, \nI'm not sure the effort produces any new capabilities. Things like function \noverloading are probably easily implemented in weave and it might \nbe easier to mix Python/C code in function calls, but nothing beyond this comes \nto mind. So, if you're developing new extension modules or optimizing Python \nfunctions in C, weave.ext_tools() might be the tool \nfor you. If you're wrapping legacy code, stick with SWIG.\n

\nThe next several sections give the basics of how to use weave.\nWe'll discuss what's happening under the covers in more detail later \non. Serious users will need to at least look at the type conversion section to \nunderstand how Python variables map to C/C++ types and how to customize this \nbehavior. One other note. If you don't know C or C++ then these docs are \nprobably of very little help to you. Further, it'd be helpful if you know \nsomething about writing Python extensions. weave does quite a \nbit for you, but for anything complex, you'll need to do some conversions, \nreference counting, etc.\n

\n\nNote: weave is actually part of the SciPy package. However, it works fine as a \nstandalone package. The examples here are given as if it is used as a stand \nalone package. If you are using from within scipy, you can use from \nscipy import weave and the examples will work identically.\n\n\n

Requirements

\n
    \n
  • Python\n

    \n I use 2.1.1. Probably 2.0 or higher should work.\n

    \n

  • \n \n
  • C++ compiler\n

    \n weave uses distutils to actually build \n extension modules, so it uses whatever compiler was originally used to \n build Python. weave itself requires a C++ compiler. If \n you used a C++ compiler to build Python, your probably fine.\n

    \n On Unix gcc is the preferred choice because I've done a little \n testing with it. All testing has been done with gcc, but I expect the \n majority of compilers should work for inline and \n ext_tools. The one issue I'm not sure about is that I've \n hard coded things so that compilations are linked with the \n stdc++ library. Is this standard across \n Unix compilers, or is this a gcc-ism?\n

    \n For blitz(), you'll need a reasonably recent version of \n gcc. 2.95.2 works on windows and 2.96 looks fine on Linux. Other \n versions are likely to work. Its likely that KAI's C++ compiler and \n maybe some others will work, but I haven't tried. My advice is to use \n gcc for now unless your willing to tinker with the code some.\n

    \n On Windows, either MSVC or gcc (www.mingw.org\" > mingw32) should work. Again, \n you'll need gcc for blitz() as the\n MSVC compiler doesn't handle templates well.\n

    \n I have not tried Cygwin, so please report success if it works for you.\n

    \n

  • \n\n
  • Numeric (optional)\n

    \n The python Numeric module from here. is required for \n blitz() to work. Be sure and get NumPy, not NumArray\n which is the \"next generation\" implementation. This is not\n required for using inline() or ext_tools.\n

    \n

  • \n
  • scipy_distutils and scipy_test (packaged with weave)\n

    \n These two modules are packaged with weave in both\n the windows installer and the source distributions. If you are using\n CVS, however, you'll need to download these separately (also available\n through CVS at SciPy).\n

    \n

  • \n
\n

\n\n\n

Installation

\n

\nThere are currently two ways to get weave. Fist, \nweave is part of SciPy and installed automatically (as a sub-\npackage) whenever SciPy is installed (although the latest version isn't in \nSciPy yet, so use this one for now). Second, since weave is \nuseful outside of the scientific community, it has been setup so that it can be\nused as a stand-alone module. \n\n

\nThe stand-alone version can be downloaded from here. Unix users should grab the \ntar ball (.tgz file) and install it using the following commands.\n\n

\n    tar -xzvf weave-0.2.tar.gz\n    cd weave-0.2\n    python setup.py install\n    
\n\nThis will also install two other packages, scipy_distutils and \nscipy_test. The first is needed by the setup process itself and \nboth are used in the unit-testing process. Numeric is required if you want to \nuse blitz(), but isn't necessary for inline() or \next_tools\n

\nFor Windows users, it's even easier. You can download the click-install .exe \nfile and run it for automatic installation. There is also a .zip file of the\nsource for those interested. It also includes a setup.py file to simplify\ninstallation. \n

\nIf you're using the CVS version, you'll need to install \nscipy_distutils and scipy_test packages (also \navailable from CVS) on your own.\n

\n \nNote: The dependency issue here is a little sticky. I hate to make people \ndownload more than one file (and so I haven't), but distutils doesn't have a \nway to do conditional installation -- at least that I know about. This can \nlead to undesired clobbering of the scipy_test and scipy_distutils modules. \nWhat to do, what to do... Right now it is a very minor issue.\n\n

\n\n

Testing

\nOnce weave is installed, fire up python and run its unit tests.\n\n
\n    >>> import weave\n    >>> weave.test()\n    runs long time... spews tons of output and a few warnings\n    .\n    .\n    .\n    ..............................................................\n    ................................................................\n    ..................................................\n    ----------------------------------------------------------------------\n    Ran 184 tests in 158.418s\n\n    OK\n    \n    >>> \n    
\n\nThis takes a loooong time. On windows, it is usually several minutes. On Unix \nwith remote file systems, I've had it take 15 or so minutes. In the end, it \nshould run about 180 tests and spew some speed results along the way. If you \nget errors, they'll be reported at the end of the output. Please let me know\nwhat if this occurs.\n\nIf you don't have Numeric installed, you'll get some module import errors \nduring the test setup phase for modules that are Numeric specific (blitz_spec, \nblitz_tools, size_check, standard_array_spec, ast_tools), but all test should\npass (about 100 and they should complete in several minutes).\n

\nIf you only want to test a single module of the package, you can do this by\nrunning test() for that specific module.\n\n

\n    >>> import weave.scalar_spec\n    >>> weave.scalar_spec.test()\n    .......\n    ----------------------------------------------------------------------\n    Ran 7 tests in 23.284s\n    
\n\nTesting Notes:\n
    \n
  • \n Windows 1\n

    \n I've had some test fail on windows machines where I have msvc, gcc-2.95.2 \n (in c:\\gcc-2.95.2), and gcc-2.95.3-6 (in c:\\gcc) all installed. My \n environment has c:\\gcc in the path and does not have c:\\gcc-2.95.2 in the \n path. The test process runs very smoothly until the end where several test \n using gcc fail with cpp0 not found by g++. If I check os.system('gcc -v') \n before running tests, I get gcc-2.95.3-6. If I check after running tests \n (and after failure), I get gcc-2.95.2. ??huh??. The os.environ['PATH'] \n still has c:\\gcc first in it and is not corrupted (msvc/distutils messes \n with the environment variables, so we have to undo its work in some \n places). If anyone else sees this, let me know - - it may just be an quirk \n on my machine (unlikely). Testing with the gcc- 2.95.2 installation always \n works.\n

    \n

  • \n
  • \n Windows 2\n

    \n If you run the tests from PythonWin or some other GUI tool, you'll get a\n ton of DOS windows popping up periodically as weave spawns\n the compiler multiple times. Very annoying. Anyone know how to fix this?\n

    \n

  • \n
  • \n wxPython\n

    \n wxPython tests are not enabled by default because importing wxPython on a \n Unix machine without access to a X-term will cause the program to exit. \n Anyone know of a safe way to detect whether wxPython can be imported and \n whether a display exists on a machine? \n

    \n

  • \n

    \n

\n
\n\n\n\n

Benchmarks

\nThis section has a few benchmarks -- thats all people want to see anyway right? \nThese are mostly taken from running files in the weave/example \ndirectory and also from the test scripts. Without more information about what \nthe test actually do, their value is limited. Still, their here for the \ncurious. Look at the example scripts for more specifics about what problem was \nactually solved by each run. These examples are run under windows 2000 using \nMicrosoft Visual C++ and python2.1 on a 850 MHz PIII laptop with 320 MB of RAM.\nSpeed up is the improvement (degredation) factor of weave compared to \nconventional Python functions. The blitz() comparisons are shown\ncompared to Numeric.\n

\n

\n\n\n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n
\n

inline and ext_tools

Algorithm

Speed up

binary search   1.50
fibonacci (recursive)  82.10
fibonacci (loop)   9.17
return None   0.14
map   1.20
dictionary sort   2.54
vector quantization  37.40
\n

blitz -- double precision

Algorithm

Speed up

a = b + c 512x512   3.05
a = b + c + d 512x512   4.59
5 pt avg. filter, 2D Image 512x512   9.01
Electromagnetics (FDTD) 100x100x100   8.61
\n
\n

\n\nThe benchmarks show blitz in the best possible light. Numeric \n(at least on my machine) is significantly worse for double precision than it is \nfor single precision calculations. If you're interested in single precision \nresults, you can pretty much divide the double precision speed up by 3 and you'll\nbe close.\n\n\n

Inline

\n

\ninline() compiles and executes C/C++ code on the fly. Variables \nin the local and global Python scope are also available in the C/C++ code. \nValues are passed to the C/C++ code by assignment much like variables \nare passed into a standard Python function. Values are returned from the C/C++ \ncode through a special argument called return_val. Also, the contents of \nmutable objects can be changed within the C/C++ code and the changes remain \nafter the C code exits and returns to Python. (more on this later)\n

\nHere's a trivial printf example using inline():\n\n

\n    >>> import weave    \n    >>> a  = 1\n    >>> weave.inline('printf(\"%d\\\\n\",a);',['a'])\n    1\n    
\n

\nIn this, its most basic form, inline(c_code, var_list) requires two \narguments. c_code is a string of valid C/C++ code. \nvar_list is a list of variable names that are passed from \nPython into C/C++. Here we have a simple printf statement that \nwrites the Python variable a to the screen. The first time you run \nthis, there will be a pause while the code is written to a .cpp file, compiled \ninto an extension module, loaded into Python, cataloged for future use, and \nexecuted. On windows (850 MHz PIII), this takes about 1.5 seconds when using \nMicrosoft's C++ compiler (MSVC) and 6-12 seconds using gcc (mingw32 2.95.2). \nAll subsequent executions of the code will happen very quickly because the code \nonly needs to be compiled once. If you kill and restart the interpreter and then \nexecute the same code fragment again, there will be a much shorter delay in the \nfractions of seconds range. This is because weave stores a \ncatalog of all previously compiled functions in an on disk cache. When it sees \na string that has been compiled, it loads the already compiled module and \nexecutes the appropriate function. \n

\n\nNote: If you try the printf example in a GUI shell such as IDLE, \nPythonWin, PyShell, etc., you're unlikely to see the output. This is because the \nC code is writing to stdout, instead of to the GUI window. This doesn't mean \nthat inline doesn't work in these environments -- it only means that standard \nout in C is not the same as the standard out for Python in these cases. Non \ninput/output functions will work as expected.\n\n

\nAlthough effort has been made to reduce the overhead associated with calling \ninline, it is still less efficient for simple code snippets than using \nequivalent Python code. The simple printf example is actually \nslower by 30% or so than using the Python print statement. And, it is \nnot difficult to create code fragments that are 8-10 times slower using inline \nthan equivalent Python. However, for more complicated algorithms, \nthe speed up can be worthwhile -- anywhere from 1.5- 30 times faster. \nAlgorithms that have to manipulate Python objects (sorting a list) usually only \nsee a factor of 2 or so improvement. Algorithms that are highly computational \nor manipulate Numeric arrays can see much larger improvements. The \nexamples/vq.py file shows a factor of 30 or more improvement on the vector \nquantization algorithm that is used heavily in information theory and \nclassification problems.\n

\n\n\n

More with printf

\n

\nMSVC users will actually see a bit of compiler output that distutils does not\nsupress the first time the code executes:\n\n

    \n    >>> weave.inline(r'printf(\"%d\\n\",a);',['a'])\n    sc_e013937dbc8c647ac62438874e5795131.cpp\n       Creating library C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\n       \\Release\\sc_e013937dbc8c647ac62438874e5795131.lib and object C:\\DOCUME\n       ~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_e013937dbc8c64\n       7ac62438874e5795131.exp\n    1\n    
\n

\nNothing bad is happening, its just a bit annoying. Anyone know how to \nturn this off? \n

\nThis example also demonstrates using 'raw strings'. The r \npreceding the code string in the last example denotes that this is a 'raw \nstring'. In raw strings, the backslash character is not interpreted as an \nescape character, and so it isn't necessary to use a double backslash to \nindicate that the '\\n' is meant to be interpreted in the C printf \nstatement instead of by Python. If your C code contains a lot\nof strings and control characters, raw strings might make things easier.\nMost of the time, however, standard strings work just as well.\n\n

\nThe printf statement in these examples is formatted to print \nout integers. What happens if a is a string? inline\nwill happily, compile a new version of the code to accept strings as input,\nand execute the code. The result?\n\n

    \n    >>> a = 'string'\n    >>> weave.inline(r'printf(\"%d\\n\",a);',['a'])\n    32956972\n    
\n

\nIn this case, the result is non-sensical, but also non-fatal. In other \nsituations, it might produce a compile time error because a is \nrequired to be an integer at some point in the code, or it could produce a \nsegmentation fault. Its possible to protect against passing \ninline arguments of the wrong data type by using asserts in \nPython.\n\n

    \n    >>> a = 'string'\n    >>> def protected_printf(a):    \n    ...     assert(type(a) == type(1))\n    ...     weave.inline(r'printf(\"%d\\n\",a);',['a'])\n    >>> protected_printf(1)\n     1\n    >>> protected_printf('string')\n    AssertError...\n    
\n\n

\nFor printing strings, the format statement needs to be changed. Also, weave\ndoesn't convert strings to char*. Instead it uses the CXX Py::String type, so \nyou have to do a little more work. Here we convert it to a C++ std::string\nand then ask for the char* version.\n\n

    \n    >>> a = 'string'    \n    >>> weave.inline(r'printf(\"%s\\n\",std::string(a).c_str());',['a'])\n    string\n    
\n

\n \nThis is a little convoluted. Perhaps strings should convert to std::string\nobjects instead of CXX objects. Or maybe to char*.\n\n\n

\nAs in this case, C/C++ code fragments often have to change to accept different \ntypes. For the given printing task, however, C++ streams provide a way of a \nsingle statement that works for integers and strings. By default, the stream \nobjects live in the std (standard) namespace and thus require the use of \nstd::.\n\n

    \n    >>> weave.inline('std::cout << a << std::endl;',['a'])\n    1    \n    >>> a = 'string'\n    >>> weave.inline('std::cout << a << std::endl;',['a'])\n    string\n    
\n \n

\nExamples using printf and cout are included in \nexamples/print_example.py.\n\n\n

More examples

\n\nThis section shows several more advanced uses of inline. It \nincludes a few algorithms from the Python Cookbook \nthat have been re-written in inline C to improve speed as well as a couple \nexamples using Numeric and wxPython.\n\n\n

Binary search

\nLets look at the example of searching a sorted list of integers for a value. \nFor inspiration, we'll use Kalle Svensson's \nbinary_search() algorithm from the Python Cookbook. His recipe follows:\n\n
\n    def binary_search(seq, t):\n        min = 0; max = len(seq) - 1\n        while 1:\n            if max < min:\n                return -1\n            m = (min  + max)  / 2\n            if seq[m] < t: \n                min = m  + 1 \n            elif seq[m] > t: \n                max = m  - 1 \n            else:\n                return m    \n    
\n\nThis Python version works for arbitrary Python data types. The C version below is \nspecialized to handle integer values. There is a little type checking done in \nPython to assure that we're working with the correct data types before heading \ninto C. The variables seq and t don't need to be \ndeclared because weave handles converting and declaring them in \nthe C code. All other temporary variables such as min, max, etc. \nmust be declared -- it is C after all. Here's the new mixed Python/C function:\n\n
    \n    def c_int_binary_search(seq,t):\n        # do a little type checking in Python\n        assert(type(t) == type(1))\n        assert(type(seq) == type([]))\n        \n        # now the C code\n        code = \"\"\"\n               #line 29 \"binary_search.py\"\n               int val, m, min = 0;  \n               int max = seq.length() - 1;\n               PyObject *py_val; \n               for(;;)\n               {\n                   if (max < min  ) \n                   { \n                       return_val =  Py::new_reference_to(Py::Int(-1)); \n                       break;\n                   } \n                   m =  (min + max) /2;\n                   val =    py_to_int(PyList_GetItem(seq.ptr(),m),\"val\"); \n                   if (val  < t) \n                       min = m  + 1;\n                   else if (val >  t)\n                       max = m - 1;\n                   else\n                   {\n                       return_val = Py::new_reference_to(Py::Int(m));\n                       break;\n                   }\n               }\n               \"\"\"\n        return inline(code,['seq','t'])\n    
\n

\nWe have two variables seq and t passed in. \nt is guaranteed (by the assert) to be an integer. \nPython integers are converted to C int types in the transition from Python to \nC. seq is a Python list. By default, it is translated to a CXX \nlist object. Full documentation for the CXX library can be found at its website. The basics are that the CXX \nprovides C++ class equivalents for Python objects that simplify, or at \nleast object orientify, working with Python objects in C/C++. For example, \nseq.length() returns the length of the list. A little more about\nCXX and its class methods, etc. is in the ** type conversions ** section.\n

\n\nNote: CXX uses templates and therefore may be a little less portable than \nanother alternative by Gordan McMillan called SCXX which was inspired by\nCXX. It doesn't use templates so it should compile faster and be more portable.\nSCXX has a few less features, but it appears to me that it would mesh with\nthe needs of weave quite well. Hopefully xxx_spec files will be written\nfor SCXX in the future, and we'll be able to compare on a more empirical\nbasis. Both sets of spec files will probably stick around, it just a question\nof which becomes the default.\n\n

\nMost of the algorithm above looks similar in C to the original Python code. \nThere are two main differences. The first is the setting of \nreturn_val instead of directly returning from the C code with a \nreturn statement. return_val is an automatically \ndefined variable of type PyObject* that is returned from the C \ncode back to Python. You'll have to handle reference counting issues when \nsetting this variable. In this example, CXX classes and functions handle the \ndirty work. All CXX functions and classes live in the namespace \nPy::. The following code converts the integer m to a \nCXX Int() object and then to a PyObject* with an \nincremented reference count using Py::new_reference_to().\n\n

   \n    return_val = Py::new_reference_to(Py::Int(m));\n    
\n

\nThe second big difference shows up in the retrieval of integer values from the \nPython list. The simple Python seq[i] call balloons into a C \nPython API call to grab the value out of the list and then a separate call to \npy_to_int() that converts the PyObject* to an integer. \npy_to_int() includes both a NULL check and a \nPyInt_Check() call as well as the conversion call. If either of \nthe checks fail, an exception is raised. The entire C++ code block is executed \nwithin a try/catch block that handles exceptions much like Python \ndoes. This removes the need for most error checking code.\n

\nIt is worth note that CXX lists do have indexing operators that result \nin code that looks much like Python. However, the overhead in using them \nappears to be relatively high, so the standard Python API was used on the \nseq.ptr() which is the underlying PyObject* of the \nList object.\n

\nThe #line directive that is the first line of the C code \nblock isn't necessary, but it's nice for debugging. If the compilation fails \nbecause of the syntax error in the code, the error will be reported as an error \nin the Python file \"binary_search.py\" with an offset from the given line number \n(29 here).\n

\nSo what was all our effort worth in terms of efficiency? Well not a lot in \nthis case. The examples/binary_search.py file runs both Python and C versions \nof the functions As well as using the standard bisect module. If \nwe run it on a 1 million element list and run the search 3000 times (for 0-\n2999), here are the results we get:\n\n

   \n    C:\\home\\ej\\wrk\\scipy\\weave\\examples> python binary_search.py\n    Binary search for 3000 items in 1000000 length list of integers:\n     speed in python: 0.159999966621\n     speed of bisect: 0.121000051498\n     speed up: 1.32\n     speed in c: 0.110000014305\n     speed up: 1.45\n     speed in c(no asserts): 0.0900000333786\n     speed up: 1.78\n    
\n

\nSo, we get roughly a 50-75% improvement depending on whether we use the Python \nasserts in our C version. If we move down to searching a 10000 element list, \nthe advantage evaporates. Even smaller lists might result in the Python \nversion being faster. I'd like to say that moving to Numeric lists (and \ngetting rid of the GetItem() call) offers a substantial speed up, but my \npreliminary efforts didn't produce one. I think the log(N) algorithm is to \nblame. Because the algorithm is nice, there just isn't much time spent \ncomputing things, so moving to C isn't that big of a win. If there are ways to \nreduce conversion overhead of values, this may improve the C/Python speed \nup. Anyone have other explanations or faster code, please let me know.\n\n\n

Dictionary Sort

\n

\nThe demo in examples/dict_sort.py is another example from the Python CookBook. \nThis \nsubmission, by Alex Martelli, demonstrates how to return the values from a \ndictionary sorted by their keys:\n\n

       \n    def sortedDictValues3(adict):\n        keys = adict.keys()\n        keys.sort()\n        return map(adict.get, keys)\n    
\n

\nAlex provides 3 algorithms and this is the 3rd and fastest of the set. The C \nversion of this same algorithm follows:\n\n

       \n    def c_sort(adict):\n        assert(type(adict) == type({}))\n        code = \"\"\"     \n        #line 21 \"dict_sort.py\"  \n        Py::List keys = adict.keys();\n        Py::List items(keys.length()); keys.sort();     \n        PyObject* item = NULL; \n        for(int i = 0;  i < keys.length();i++)\n        {\n            item = PyList_GET_ITEM(keys.ptr(),i);\n            item = PyDict_GetItem(adict.ptr(),item);\n            Py_XINCREF(item);\n            PyList_SetItem(items.ptr(),i,item);              \n        }           \n        return_val = Py::new_reference_to(items);\n        \"\"\"   \n        return inline_tools.inline(code,['adict'],verbose=1)\n    
\n

\nLike the original Python function, the C++ version can handle any Python \ndictionary regardless of the key/value pair types. It uses CXX objects for the \nmost part to declare python types in C++, but uses Python API calls to manipulate \ntheir contents. Again, this choice is made for speed. The C++ version, while\nmore complicated, is about a factor of 2 faster than Python.\n\n

       \n    C:\\home\\ej\\wrk\\scipy\\weave\\examples> python dict_sort.py\n    Dict sort of 1000 items for 300 iterations:\n     speed in python: 0.319999933243\n    [0, 1, 2, 3, 4]\n     speed in c: 0.151000022888\n     speed up: 2.12\n    [0, 1, 2, 3, 4]\n    
\n

\n\n

Numeric -- cast/copy/transpose

\n\nCastCopyTranspose is a function called quite heavily by Linear Algebra routines\nin the Numeric library. It's needed in part because of the row-major memory layout\nof multi-dimensional Python (and C) arrays vs. the col-major order of the underlying\nFortran algorithms. For small matrices (say 100x100 or less), a significant\nportion of the common routines such as LU decomposition or singular value decomposition\nis spent in this setup routine. This shouldn't happen. Here is the Python\nversion of the function using standard Numeric operations.\n\n
       \n    def _castCopyAndTranspose(type, array):\n        if a.typecode() == type:\n            cast_array = copy.copy(Numeric.transpose(a))\n        else:\n            cast_array = copy.copy(Numeric.transpose(a).astype(type))\n        return cast_array\n    
\n\nAnd the following is a inline C version of the same function:\n\n
\n    from weave.blitz_tools import blitz_type_factories\n    from weave import scalar_spec\n    from weave import inline\n    def _cast_copy_transpose(type,a_2d):\n        assert(len(shape(a_2d)) == 2)\n        new_array = zeros(shape(a_2d),type)\n        numeric_type = scalar_spec.numeric_to_blitz_type_mapping[type]\n        code = \\\n        \"\"\"  \n        for(int i = 0;i < _Na_2d[0]; i++)  \n            for(int j = 0;  j < _Na_2d[1]; j++)\n                new_array(i,j) = (%s) a_2d(j,i);\n        \"\"\" % numeric_type\n        inline(code,['new_array','a_2d'],\n               type_factories = blitz_type_factories,compiler='gcc')\n        return new_array\n    
\n\nThis example uses blitz++ arrays instead of the standard representation of \nNumeric arrays so that indexing is simplier to write. This is accomplished by \npassing in the blitz++ \"type factories\" to override the standard Python to C++ \ntype conversions. Blitz++ arrays allow you to write clean, fast code, but they \nalso are sloooow to compile (20 seconds or more for this snippet). This is why \nthey aren't the default type used for Numeric arrays (and also because most \ncompilers can't compile blitz arrays...). inline() is also forced \nto use 'gcc' as the compiler because the default compiler on Windows (MSVC) \nwill not compile blitz code. 'gcc' I think will use the standard compiler \non Unix machine instead of explicitly forcing gcc (check this) \n\nComparisons of the Python vs inline C++ code show a factor of 3 speed up. Also \nshown are the results of an \"inplace\" transpose routine that can be used if the \noutput of the linear algebra routine can overwrite the original matrix (this is \noften appropriate). This provides another factor of 2 improvement.\n\n
\n     #C:\\home\\ej\\wrk\\scipy\\weave\\examples> python cast_copy_transpose.py\n    # Cast/Copy/Transposing (150,150)array 1 times\n    #  speed in python: 0.870999932289\n    #  speed in c: 0.25\n    #  speed up: 3.48\n    #  inplace transpose c: 0.129999995232\n    #  speed up: 6.70\n    
\n\n\n

wxPython

\n\ninline knows how to handle wxPython objects. That's nice in and of\nitself, but it also demonstrates that the type conversion mechanism is reasonably \nflexible. Chances are, it won't take a ton of effort to support special types\nyou might have. The examples/wx_example.py borrows the scrolled window\nexample from the wxPython demo, except that it mixes inline C code in the middle\nof the drawing function.\n\n
\n    def DoDrawing(self, dc):\n        \n        red = wxNamedColour(\"RED\");\n        blue = wxNamedColour(\"BLUE\");\n        grey_brush = wxLIGHT_GREY_BRUSH;\n        code = \\\n        \"\"\"\n        #line 108 \"wx_example.py\" \n        dc->BeginDrawing();\n        dc->SetPen(wxPen(*red,4,wxSOLID));\n        dc->DrawRectangle(5,5,50,50);\n        dc->SetBrush(*grey_brush);\n        dc->SetPen(wxPen(*blue,4,wxSOLID));\n        dc->DrawRectangle(15, 15, 50, 50);\n        \"\"\"\n        inline(code,['dc','red','blue','grey_brush'])\n        \n        dc.SetFont(wxFont(14, wxSWISS, wxNORMAL, wxNORMAL))\n        dc.SetTextForeground(wxColour(0xFF, 0x20, 0xFF))\n        te = dc.GetTextExtent(\"Hello World\")\n        dc.DrawText(\"Hello World\", 60, 65)\n\n        dc.SetPen(wxPen(wxNamedColour('VIOLET'), 4))\n        dc.DrawLine(5, 65+te[1], 60+te[0], 65+te[1])\n        ...\n    
\n\nHere, some of the Python calls to wx objects were just converted to C++ calls. There\nisn't any benefit, it just demonstrates the capabilities. You might want to use this\nif you have a computationally intensive loop in your drawing code that you want to \nspeed up.\n\nOn windows, you'll have to use the MSVC compiler if you use the standard wxPython\nDLLs distributed by Robin Dunn. Thats because MSVC and gcc, while binary\ncompatible in C, are not binary compatible for C++. In fact, its probably best, no \nmatter what platform you're on, to specify that inline use the same\ncompiler that was used to build wxPython to be on the safe side. There isn't currently\na way to learn this info from the library -- you just have to know. Also, at least\non the windows platform, you'll need to install the wxWindows libraries and link to \nthem. I think there is a way around this, but I haven't found it yet -- I get some\nlinking errors dealing with wxString. One final note. You'll probably have to\ntweak weave/wx_spec.py or weave/wx_info.py for your machine's configuration to\npoint at the correct directories etc. There. That should sufficiently scare people\ninto not even looking at this... :)\n\n
\n

Keyword Options

\n

\nThe basic definition of the inline() function has a slew of \noptional variables. It also takes keyword arguments that are passed to \ndistutils as compiler options. The following is a formatted \ncut/paste of the argument section of inline's doc-string. It \nexplains all of the variables. Some examples using various options will \nfollow.\n\n

       \n    def inline(code,arg_names,local_dict = None, global_dict = None, \n               force = 0, \n               compiler='',\n               verbose = 0, \n               support_code = None,\n               customize=None, \n               type_factories = None, \n               auto_downcast=1,\n               **kw):\n    
\n\n \ninline has quite \na few options as listed below. Also, the keyword arguments for distutils \nextension modules are accepted to specify extra information needed for \ncompiling. \n
\n

inline Arguments:

\n
\n
\n
code
\n \n
\nstring. A string of valid C++ code. It should not \n specify a return statement. Instead it should assign results that need to be \n returned to Python in the return_val. \n
\n\n
arg_names
\n \n
\nlist of strings. A list of Python variable names \n that should be transferred from Python into the C/C++ code. \n
\n\n
local_dict
\n \n
\noptional. dictionary. If specified, it is a \n dictionary of values that should be used as the local scope for the C/C++ \n code. If local_dict is not specified the local dictionary of the calling \n function is used. \n
\n\n
global_dict
\n \n
\noptional. dictionary. If specified, it is a \n dictionary of values that should be used as the global scope for the C/C++ \n code. If global_dict is not specified the global dictionary of the calling \n function is used. \n
\n\n
force
\n \n
\noptional. 0 or 1. default 0. If 1, the C++ code is \n compiled every time inline is called. This is really only useful for \n debugging, and probably only useful if you're editing support_code a lot. \n
\n\n
compiler
\n \n
\noptional. string. The name of compiler to use when compiling. On windows, it \nunderstands 'msvc' and 'gcc' as well as all the compiler names understood by \ndistutils. On Unix, it'll only understand the values understood by distutils. \n(I should add 'gcc' though to this).\n

\nOn windows, the compiler defaults to the Microsoft C++ compiler. If this isn't \navailable, it looks for mingw32 (the gcc compiler).\n

\nOn Unix, it'll probably use the same compiler that was used when compiling \nPython. Cygwin's behavior should be similar.

\n
\n\n
verbose
\n \n
\noptional. 0,1, or 2. default 0. Specifies how \n much information is printed during the compile phase of inlining code. 0 is \n silent (except on windows with msvc where it still prints some garbage). 1 \n informs you when compiling starts, finishes, and how long it took. 2 prints \n out the command lines for the compilation process and can be useful if you're \n having problems getting code to work. It's handy for finding the name of the \n .cpp file if you need to examine it. verbose has no effect if the \n compilation isn't necessary. \n
\n\n
support_code
\n \n
\noptional. string. A string of valid C++ code \n declaring extra code that might be needed by your compiled function. This \n could be declarations of functions, classes, or structures. \n
\n\n
customize
\n \n
\noptional. base_info.custom_info object. An \n alternative way to specify support_code, headers, etc. needed by the \n function; see the weave.base_info module for more details. (not sure \n this'll be used much). \n \n
\n
type_factories
\n \n
\noptional. list of type specification factories. These guys are what convert \nPython data types to C/C++ data types. If you'd like to use a different set of \ntype conversions than the default, specify them here. Look in the type \nconversions section of the main documentation for examples.\n
\n
auto_downcast
\n \n
\noptional. 0 or 1. default 1. This only affects functions that have Numeric \narrays as input variables. Setting this to 1 will cause all floating point \nvalues to be cast as float instead of double if all the Numeric arrays are of \ntype float. If even one of the arrays has type double or double complex, all \nvariables maintain their standard types.\n
\n
\n
\n\n

Distutils keywords:

\n
\ninline() also accepts a number of distutils keywords \nfor controlling how the code is compiled. The following descriptions have been \ncopied from Greg Ward's distutils.extension.Extension class doc-\nstrings for convenience:\n\n
\n
sources
\n \n
\n[string] list of source filenames, relative to the \n distribution root (where the setup script lives), in Unix form \n (slash-separated) for portability. Source files may be C, C++, SWIG (.i), \n platform-specific resource files, or whatever else is recognized by the \n \"build_ext\" command as source for a Python extension. Note: The module_path \n file is always appended to the front of this list \n
\n\n
include_dirs
\n \n
\n[string] list of directories to search for C/C++ \n header files (in Unix form for portability) \n
\n\n
define_macros
\n \n
\n[(name : string, value : string|None)] list of \n macros to define; each macro is defined using a 2-tuple, where 'value' is \n either the string to define it to or None to define it without a particular \n value (equivalent of \"#define FOO\" in source or -DFOO on Unix C compiler \n command line) \n
\n
undef_macros
\n \n
\n[string] list of macros to undefine explicitly \n
\n
library_dirs
\n
\n[string] list of directories to search for C/C++ libraries at link time \n
\n
libraries
\n
\n[string] list of library names (not filenames or paths) to link against \n
\n
runtime_library_dirs
\n
\n[string] list of directories to search for C/C++ libraries at run time (for \nshared extensions, this is when the extension is loaded) \n
\n\n
extra_objects
\n \n
\n[string] list of extra files to link with (eg. \n object files not implied by 'sources', static library that must be \n explicitly specified, binary resource files, etc.) \n
\n\n
extra_compile_args
\n \n
\n[string] any extra platform- and compiler-specific \n information to use when compiling the source files in 'sources'. For \n platforms and compilers where \"command line\" makes sense, this is typically \n a list of command-line arguments, but for other platforms it could be \n anything. \n
\n
extra_link_args
\n \n
\n[string] any extra platform- and compiler-specific \n information to use when linking object files together to create the \n extension (or to create a new static Python interpreter). Similar \n interpretation as for 'extra_compile_args'. \n
\n
export_symbols
\n \n
\n[string] list of symbols to be exported from a shared extension. Not used on \nall platforms, and not generally necessary for Python extensions, which \ntypically export exactly one symbol: \"init\" + extension_name. \n
\n
\n
\n\n\n

Keyword Option Examples

\nWe'll walk through several examples here to demonstrate the behavior of \ninline and also how the various arguments are used.\n\nIn the simplest (most) cases, code and arg_names\nare the only arguments that need to be specified. Here's a simple example\nrun on Windows machine that has Microsoft VC++ installed.\n\n
\n    >>> from weave import inline\n    >>> a = 'string'\n    >>> code = \"\"\"\n    ...        int l = a.length();\n    ...        return_val = Py::new_reference_to(Py::Int(l));\n    ...        \"\"\"\n    >>> inline(code,['a'])\n     sc_86e98826b65b047ffd2cd5f479c627f12.cpp\n    Creating\n       library C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86e98826b65b047ffd2cd5f479c627f12.lib\n    and object C:\\DOCUME~ 1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86e98826b65b047ff\n    d2cd5f479c627f12.exp\n    6\n    >>> inline(code,['a'])\n    6\n    
\n \nWhen inline is first run, you'll notice that pause and some \ntrash printed to the screen. The \"trash\" is acutually part of the compilers\noutput that distutils does not supress. The name of the extension file, \nsc_bighonkingnumber.cpp, is generated from the md5 check sum\nof the C/C++ code fragment. On Unix or windows machines with only\ngcc installed, the trash will not appear. On the second call, the code \nfragment is not compiled since it already exists, and only the answer is \nreturned. Now kill the interpreter and restart, and run the same code with\na different string.\n\n
\n    >>> from weave import inline\n    >>> a = 'a longer string' \n    >>> code = \"\"\" \n    ...        int l = a.length();\n    ...        return_val = Py::new_reference_to(Py::Int(l));  \n    ...        \"\"\"\n    >>> inline(code,['a'])\n    15\n    
\n

\nNotice this time, inline() did not recompile the code because it\nfound the compiled function in the persistent catalog of functions. There is\na short pause as it looks up and loads the function, but it is much shorter \nthan compiling would require.\n

\nYou can specify the local and global dictionaries if you'd like (much like \nexec or eval() in Python), but if they aren't \nspecified, the \"expected\" ones are used -- i.e. the ones from the function that \ncalled inline() . This is accomplished through a little call \nframe trickery. Here is an example where the local_dict is specified using\nthe same code example from above:\n\n

\n    >>> a = 'a longer string'\n    >>> b = 'an even  longer string' \n    >>> my_dict = {'a':b}\n    >>> inline(code,['a'])\n    15\n    >>> inline(code,['a'],my_dict)\n    21\n    
\n \n

\nEvery time the code is changed, inline does a \nrecompile. However, changing any of the other options in inline does not\nforce a recompile. The force option was added so that one\ncould force a recompile when tinkering with other variables. In practice,\nit is just as easy to change the code by a single character\n(like adding a space some place) to force the recompile. Note: It also \nmight be nice to add some methods for purging the cache and on disk \ncatalogs.\n

\nI use verbose sometimes for debugging. When set to 2, it'll \noutput all the information (including the name of the .cpp file) that you'd\nexpect from running a make file. This is nice if you need to examine the\ngenerated code to see where things are going haywire. Note that error\nmessages from failed compiles are printed to the screen even if verbose\n is set to 0.\n

\nThe following example demonstrates using gcc instead of the standard msvc \ncompiler on windows using same code fragment as above. Because the example has \nalready been compiled, the force=1 flag is needed to make \ninline() ignore the previously compiled version and recompile \nusing gcc. The verbose flag is added to show what is printed out:\n\n

\n    >>>inline(code,['a'],compiler='gcc',verbose=2,force=1)\n    running build_ext    \n    building 'sc_86e98826b65b047ffd2cd5f479c627f13' extension \n    c:\\gcc-2.95.2\\bin\\g++.exe -mno-cygwin -mdll -O2 -w -Wstrict-prototypes -IC:\n    \\home\\ej\\wrk\\scipy\\weave -IC:\\Python21\\Include -c C:\\DOCUME~1\\eric\\LOCAL\n    S~1\\Temp\\python21_compiled\\sc_86e98826b65b047ffd2cd5f479c627f13.cpp -o C:\\D\n    OCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86e98826b65b04\n    7ffd2cd5f479c627f13.o    \n    skipping C:\\home\\ej\\wrk\\scipy\\weave\\CXX\\cxxextensions.c (C:\\DOCUME~1\\eri\n    c\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\cxxextensions.o up-to-date)\n    skipping C:\\home\\ej\\wrk\\scipy\\weave\\CXX\\cxxsupport.cxx (C:\\DOCUME~1\\eric\n    \\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\cxxsupport.o up-to-date)\n    skipping C:\\home\\ej\\wrk\\scipy\\weave\\CXX\\IndirectPythonInterface.cxx (C:\\\n    DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\indirectpythonin\n    terface.o up-to-date)\n    skipping C:\\home\\ej\\wrk\\scipy\\weave\\CXX\\cxx_extensions.cxx (C:\\DOCUME~1\\\n    eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\cxx_extensions.o up-to-da\n    te)\n    writing C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86\n    e98826b65b047ffd2cd5f479c627f13.def\n    c:\\gcc-2.95.2\\bin\\dllwrap.exe --driver-name g++ -mno-cygwin -mdll -static -\n    -output-lib C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\l\n    ibsc_86e98826b65b047ffd2cd5f479c627f13.a --def C:\\DOCUME~1\\eric\\LOCALS~1\\Te\n    mp\\python21_compiled\\temp\\Release\\sc_86e98826b65b047ffd2cd5f479c627f13.def \n    -s C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86e9882\n    6b65b047ffd2cd5f479c627f13.o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compil\n    ed\\temp\\Release\\cxxextensions.o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_com\n    
piled\\temp\\Release\\cxxsupport.o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_com\n    piled\\temp\\Release\\indirectpythoninterface.o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\n    \\python21_compiled\\temp\\Release\\cxx_extensions.o -LC:\\Python21\\libs -lpytho\n    n21 -o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\sc_86e98826b65b047f\n    fd2cd5f479c627f13.pyd\n    15\n    
\n\nThat's quite a bit of output. verbose=1 just prints the compile\ntime.\n\n
\n    >>>inline(code,['a'],compiler='gcc',verbose=1,force=1)\n    Compiling code...\n    finished compiling (sec):  6.00800001621\n    15\n    
\n\n

\n Note: I've only used the compiler option for switching between 'msvc'\nand 'gcc' on windows. It may have use on Unix also, but I don't know yet.\n\n\n

\nThe support_code argument is likely to be used a lot. It allows \nyou to specify extra code fragments such as function, structure or class \ndefinitions that you want to use in the code string. Note that \nchanges to support_code do not force a recompile. The \ncatalog only relies on code (for performance reasons) to determine \nwhether recompiling is necessary. So, if you make a change to support_code, \nyou'll need to alter code in some way or use the \nforce argument to get the code to recompile. I usually just add \nsome innocuous whitespace to the end of one of the lines in code \nsomewhere. Here's an example of defining a separate method for calculating\nthe string length:\n\n

\n    >>> from weave import inline\n    >>> a = 'a longer string'\n    >>> support_code = \"\"\"\n    ...                PyObject* length(Py::String a)\n    ...                {\n    ...                    int l = a.length();  \n    ...                    return Py::new_reference_to(Py::Int(l)); \n    ...                }\n    ...                \"\"\"        \n    >>> inline(\"return_val = length(a);\",['a'],\n    ...        support_code = support_code)\n    15\n    
\n

\ncustomize is a left over from a previous way of specifying \ncompiler options. It is a custom_info object that can specify \nquite a bit of information about how a file is compiled. These \ninfo objects are the standard way of defining compile information \nfor type conversion classes. However, I don't think they are as handy here, \nespecially since we've exposed all the keyword arguments that distutils can \nhandle. Between these keywords, and the support_code option, I \nthink customize may be obsolete. We'll see if anyone cares to use \nit. If not, it'll get axed in the next version.\n

\nThe type_factories variable is important to people who want to\ncustomize the way arguments are converted from Python to C. We'll talk about\nthis in the next chapter **xx** of this document when we discuss type\nconversions.\n

\nauto_downcast handles one of the big type conversion issues that\nis common when using Numeric arrays in conjunction with Python scalar values.\nIf you have an array of single precision values and multiply that array by a \nPython scalar, the result is upcast to a double precision array because the\nscalar value is double precision. This is not usually the desired behavior\nbecause it can double your memory usage. auto_downcast goes\nsome distance towards changing the casting precedence of arrays and scalars.\nIf you're only using single precision arrays, it will automatically downcast all\nscalar values from double to single precision when they are passed into the\nC++ code. This is the default behavior. If you want all values to keep their\ndefault type, set auto_downcast to 0.\n

\n\n\n\n

Returning Values

\n\nPython variables in the local and global scope transfer seamlessly from Python \ninto the C++ snippets. And, if inline were to completely live up\nto its name, any modifications to variables in the C++ code would be reflected\nin the Python variables when control was passed back to Python. For example,\nthe desired behavior would be something like:\n\n
\n    # THIS DOES NOT WORK\n    >>> a = 1\n    >>> weave.inline(\"a++;\",['a'])\n    >>> a\n    2\n    
\n\nInstead you get:\n\n
\n    >>> a = 1\n    >>> weave.inline(\"a++;\",['a'])\n    >>> a\n    1\n    
\n \nVariables are passed into C++ as if you are calling a Python function. Python's \ncalling convention is sometimes called \"pass by assignment\". This means it's as \nif a c_a = a assignment is made right before inline \ncall is made and the c_a variable is used within the C++ code. \nThus, any changes made to c_a are not reflected in Python's \na variable. Things do get a little more confusing, however, when \nlooking at variables with mutable types. Changes made in C++ to the contents \nof mutable types are reflected in the Python variables.\n\n
\n    >>> a= [1,2]\n    >>> weave.inline(\"PyList_SetItem(a.ptr(),0,PyInt_FromLong(3));\",['a'])\n    >>> print a\n    [3, 2]\n    
\n\nSo modifications to the contents of mutable types in C++ are seen when control\nis returned to Python. Modifications to immutable types such as tuples,\nstrings, and numbers do not alter the Python variables.\n\nIf you need to make changes to an immutable variable, you'll need to assign\nthe new value to the \"magic\" variable return_val in C++. This\nvalue is returned by the inline() function:\n\n
\n    >>> a = 1\n    >>> a = weave.inline(\"return_val = Py::new_reference_to(Py::Int(a+1));\",['a'])  \n    >>> a\n    2\n    
\n\nThe return_val variable can also be used to return newly created \nvalues. This is possible by returning a tuple. The following trivial example \nillustrates how this can be done:\n\n
       \n    # python version\n    def multi_return():\n        return 1, '2nd'\n    \n    # C version.\n    def c_multi_return():    \n        code =  \"\"\"\n     \t        Py::Tuple results(2);\n     \t        results[0] = Py::Int(1);\n     \t        results[1] = Py::String(\"2nd\");\n     \t        return_val = Py::new_reference_to(results); \t        \n                \"\"\"\n        return inline_tools.inline(code)\n    
\n

\nThe example is available in examples/tuple_return.py. It also\nhas the dubious honor of demonstrating how much inline() can \nslow things down. The C version here is about 10 times slower than the Python\nversion. Of course, something so trivial has no reason to be written in\nC anyway.\n\n\n

The issue with locals()

\n

\ninline passes the locals() and globals() \ndictionaries from Python into the C++ function from the calling function. It \nextracts the variables that are used in the C++ code from these dictionaries, \nconverts them to C++ variables, and then calculates using them. It seems like \nit would be trivial, then, after the calculations were finished to then insert \nthe new values back into the locals() and globals() \ndictionaries so that the modified values were reflected in Python. \nUnfortunately, as pointed out by the Python manual, the locals() dictionary is \nnot writable. \n

\n\nI suspect locals() is not writable because there are some \noptimizations done to speed lookups of the local namespace. I'm guessing local \nlookups don't always look at a dictionary to find values. Can someone \"in the \nknow\" confirm or correct this? Another thing I'd like to know is whether there \nis a way to write to the local namespace of another stack frame from C/C++. If \nso, it would be possible to have some clean up code in compiled functions that \nwrote final values of variables in C++ back to the correct Python stack frame. \nI think this goes a long way toward making inline truly live up \nto its name. I don't think we'll get to the point of creating variables in \nPython for variables created in C -- although I suppose with a C/C++ parser you \ncould do that also.\n\n

\n\n\n

A quick look at the code

\n\nweave generates a C++ file holding an extension function for \neach inline code snippet. These file names are generated \nfrom the md5 signature of the code snippet and saved to a location specified by \nthe PYTHONCOMPILED environment variable (discussed later). The cpp files are \ngenerally about 200-400 lines long and include quite a few functions to support \ntype conversions, etc. However, the actual compiled function is pretty simple. \nBelow is the familiar printf example:\n\n
\n    >>> import weave    \n    >>> a = 1\n    >>> weave.inline('printf(\"%d\\\\n\",a);',['a'])\n    1\n    
\n\nAnd here is the extension function generated by inline:\n\n
\n    static PyObject* compiled_func(PyObject*self, PyObject* args)\n    {\n        // The Py_None needs an incref before returning\n        PyObject *return_val = NULL;\n        int exception_occured = 0;\n        PyObject *py__locals = NULL;\n        PyObject *py__globals = NULL;\n        PyObject *py_a;\n        py_a = NULL;\n        \n        if(!PyArg_ParseTuple(args,\"OO:compiled_func\",&py__locals,&py__globals))\n            return NULL;\n        try                              \n        {                                \n            PyObject* raw_locals = py_to_raw_dict(py__locals,\"_locals\");\n            PyObject* raw_globals = py_to_raw_dict(py__globals,\"_globals\");\n            int a = py_to_int (get_variable(\"a\",raw_locals,raw_globals),\"a\");\n            /* Here is the inline code */            \n            printf(\"%d\\n\",a);\n            /* I would like to fill in changed locals and globals here... */\n        }                                       \n        catch( Py::Exception& e)           \n        {                                \n            return_val =  Py::Null();    \n            exception_occured = 1;       \n        }                                 \n        if(!return_val && !exception_occured)\n        {\n                                      \n            Py_INCREF(Py_None);              \n            return_val = Py_None;            \n        }\n        /* clean up code */\n        \n        /* return */                              \n        return return_val;           \n    }                                \n    
\n\nEvery inline function takes exactly two arguments -- the local and global\ndictionaries for the current scope. All variable values are looked up out\nof these dictionaries. The lookups, along with all inline code \nexecution, are done within a C++ try block. If the variables\naren't found, or there is an error converting a Python variable to the \nappropriate type in C++, an exception is raised. The C++ exception\nis automatically converted to a Python exception by CXX and returned to Python.\n\nThe py_to_int() function illustrates how the conversions and\nexception handling works. py_to_int first checks that the given PyObject*\npointer is not NULL and is a Python integer. If all is well, it calls the\nPython API to convert the value to an int. Otherwise, it calls\nhandle_bad_type() which gathers information about what went wrong\nand then raises a CXX TypeError which returns to Python as a TypeError.\n\n
\n    int py_to_int(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyInt_Check(py_obj))\n            handle_bad_type(py_obj,\"int\", name);\n        return (int) PyInt_AsLong(py_obj);\n    }\n    
\n\n
\n    void handle_bad_type(PyObject* py_obj, char* good_type, char*  var_name)\n    {\n        char msg[500];\n        sprintf(msg,\"received '%s' type instead of '%s' for variable '%s'\",\n                find_type(py_obj),good_type,var_name);\n        throw Py::TypeError(msg);\n    }\n    \n    char* find_type(PyObject* py_obj)\n    {\n        if(py_obj == NULL) return \"C NULL value\";\n        if(PyCallable_Check(py_obj)) return \"callable\";\n        if(PyString_Check(py_obj)) return \"string\";\n        if(PyInt_Check(py_obj)) return \"int\";\n        if(PyFloat_Check(py_obj)) return \"float\";\n        if(PyDict_Check(py_obj)) return \"dict\";\n        if(PyList_Check(py_obj)) return \"list\";\n        if(PyTuple_Check(py_obj)) return \"tuple\";\n        if(PyFile_Check(py_obj)) return \"file\";\n        if(PyModule_Check(py_obj)) return \"module\";\n        \n        //should probably do more interagation (and thinking) on these.\n        if(PyCallable_Check(py_obj) && PyInstance_Check(py_obj)) return \"callable\";\n        if(PyInstance_Check(py_obj)) return \"instance\"; \n        if(PyCallable_Check(py_obj)) return \"callable\";\n        return \"unkown type\";\n    }\n    
\n\nSince the inline is also executed within the try/catch\nblock, you can use CXX exceptions within your code. It is usually a bad idea\nto directly return from your code, even if an error occurs. This\nskips the clean up section of the extension function. In this simple example,\nthere isn't any clean up code, but in more complicated examples, there may\nbe some reference counting that needs to be taken care of here on converted\nvariables. To avoid this, either use exceptions or set \nreturn_val to NULL and use if/then's to skip code\nafter errors.\n\n\n

Technical Details

\n

\nThere are several main steps to using C/C++ code within Python:\n

    \n
  1. Type conversion \n
  2. Generating C/C++ code \n
  3. Compile the code to an extension module \n
  4. Catalog (and cache) the function for future use
  5. \n
\n

\nItems 1 and 2 above are related, but most easily discussed separately. Type \nconversions are customizable by the user if needed. Understanding them is \npretty important for anything beyond trivial uses of inline. \nGenerating the C/C++ code is handled by ext_function and \next_module classes. For the most part, compiling the code is \nhandled by distutils. Some customizations were needed, but they were \nrelatively minor and do not require changes to distutils itself. Cataloging is \npretty simple in concept, but surprisingly required the most code to implement \n(and still likely needs some work). So, this section covers items 1 and 4 from \nthe list. Item 2 is covered later in the chapter covering the \next_tools module, and distutils is covered by a completely \nseparate document xxx.\n\n

Passing Variables in/out of the C/C++ code

\n\nNote: Passing variables into the C code is pretty straightforward, but there \nare subtleties to how variable modifications in C are returned to Python. See Returning Values for a more thorough discussion of \nthis issue.\n \n \n\n

Type Conversions

\n\n\nNote: Maybe xxx_converter instead of \nxxx_specification is a more descriptive name. Might change in \nfuture version?\n\n\n

\nBy default, inline() makes the following type conversions between\nPython and C++ types.\n

\n\n

\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n

Default Data Type Conversions

\n

Python

\n

C++

   int   int
   float   double
   complex   std::complex
   string   Py::String
   list   Py::List
   dict   Py::Dict
   tuple   Py::Tuple
   file   FILE*
   callable   PyObject*
   instance   PyObject*
   Numeric.array   PyArrayObject*
   wxXXX   wxXXX*
\n
\n

\nThe Py:: namespace is defined by the \nCXX library which has C++ class\nequivalents for many Python types. std:: is the namespace of the\nstandard library in C++.\n

\n\nNote: \n

    \n
  • I haven't figured out how to handle long int yet (I think they are currently converted \n to int - - check this). \n \n
  • \nHopefully VTK will be added to the list soon
  • \n
\n\n

\n\nPython to C++ conversions fill in code in several locations in the generated\ninline extension function. Below is the basic template for the\nfunction. This is actually the exact code that is generated by calling\nweave.inline(\"\").\n\n

\n    static PyObject* compiled_func(PyObject*self, PyObject* args)\n    {\n        PyObject *return_val = NULL;\n        int exception_occured = 0;\n        PyObject *py__locals = NULL;\n        PyObject *py__globals = NULL;\n        PyObject *py_a;\n        py_a = NULL;\n    \n        if(!PyArg_ParseTuple(args,\"OO:compiled_func\",&py__locals,&py__globals))\n            return NULL;\n        try\n        {\n            PyObject* raw_locals = py_to_raw_dict(py__locals,\"_locals\");\n            PyObject* raw_globals = py_to_raw_dict(py__globals,\"_globals\");\n            /* argument conversion code */\n            /* inline code */\n            /*I would like to fill in changed locals and globals here...*/\n    \n        }\n        catch( Py::Exception& e)\n        {\n            return_val =  Py::Null();\n            exception_occured = 1;\n        }\n        /* cleanup code */\n        if(!return_val && !exception_occured)\n        {\n    \n            Py_INCREF(Py_None);\n            return_val = Py_None;\n        }\n    \n        return return_val;\n    }\n    
\n\nThe /* inline code */ section is filled with the code passed to\nthe inline() function call. The \n/*argument conversion code*/ and /* cleanup code */\nsections are filled with code that handles conversion from Python to C++\ntypes and code that deallocates memory or manipulates reference counts before\nthe function returns. The following sections demonstrate how these two areas\nare filled in by the default conversion methods.\n\n \nNote: I'm not sure I have reference counting correct on a few of these. The \nonly thing I increase/decrease the ref count on is Numeric arrays. If you\nsee an issue, please let me know.\n\n\n\n

Numeric Argument Conversion

\n\nInteger, floating point, and complex arguments are handled in a very similar\nfashion. Consider the following inline function that has a single integer \nvariable passed in:\n\n
\n    >>> a = 1\n    >>> inline(\"\",['a'])\n    
\n\nThe argument conversion code inserted for a is:\n\n
\n    /* argument conversion code */\n    int a = py_to_int (get_variable(\"a\",raw_locals,raw_globals),\"a\");\n    
\n\nget_variable() reads the variable a\nfrom the local and global namespaces. py_to_int() has the following\nform:\n\n
\n    static int py_to_int(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyInt_Check(py_obj))\n            handle_bad_type(py_obj,\"int\", name);\n        return (int) PyInt_AsLong(py_obj);\n    }\n    
\n\nSimilarly, the float and complex conversion routines look like:\n\n
    \n    static double py_to_float(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyFloat_Check(py_obj))\n            handle_bad_type(py_obj,\"float\", name);\n        return PyFloat_AsDouble(py_obj);\n    }\n    \n    static std::complex py_to_complex(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyComplex_Check(py_obj))\n            handle_bad_type(py_obj,\"complex\", name);\n        return std::complex(PyComplex_RealAsDouble(py_obj),\n                                    PyComplex_ImagAsDouble(py_obj));    \n    }\n    
\n\nNumeric conversions do not require any clean up code.\n\n\n

String, List, Tuple, and Dictionary Conversion

\n\nStrings, Lists, Tuples and Dictionary conversions are all converted to \nCXX types by default.\n\nFor the following code, \n\n
\n    >>> a = [1]\n    >>> inline(\"\",['a'])\n    
\n\nThe argument conversion code inserted for a is:\n\n
\n    /* argument conversion code */\n    Py::List a = py_to_list (get_variable(\"a\",raw_locals,raw_globals),\"a\");\n    
\n\nget_variable() reads the variable a\nfrom the local and global namespaces. py_to_list() and its\nfriends has the following form:\n\n
    \n    static Py::List py_to_list(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyList_Check(py_obj))\n            handle_bad_type(py_obj,\"list\", name);\n        return Py::List(py_obj);\n    }\n    \n    static Py::String py_to_string(PyObject* py_obj,char* name)\n    {\n        if (!PyString_Check(py_obj))\n            handle_bad_type(py_obj,\"string\", name);\n        return Py::String(py_obj);\n    }\n\n    static Py::Dict py_to_dict(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyDict_Check(py_obj))\n            handle_bad_type(py_obj,\"dict\", name);\n        return Py::Dict(py_obj);\n    }\n    \n    static Py::Tuple py_to_tuple(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyTuple_Check(py_obj))\n            handle_bad_type(py_obj,\"tuple\", name);\n        return Py::Tuple(py_obj);\n    }\n    
\n\nCXX handles reference counts on for strings, lists, tuples, and dictionaries,\nso clean up code isn't necessary.\n\n\n

File Conversion

\n\nFor the following code, \n\n
\n    >>> a = open(\"bob\",'w')  \n    >>> inline(\"\",['a'])\n    
\n\nThe argument conversion code is:\n\n
\n    /* argument conversion code */\n    PyObject* py_a = get_variable(\"a\",raw_locals,raw_globals);\n    FILE* a = py_to_file(py_a,\"a\");\n    
\n\nget_variable() reads the variable a\nfrom the local and global namespaces. py_to_file() converts\nPyObject* to a FILE* and increments the reference count of the PyObject*:\n\n
\n    FILE* py_to_file(PyObject* py_obj, char* name)\n    {\n        if (!py_obj || !PyFile_Check(py_obj))\n            handle_bad_type(py_obj,\"file\", name);\n    \n        Py_INCREF(py_obj);\n        return PyFile_AsFile(py_obj);\n    }\n    
\n\nBecause the PyObject* was incremented, the clean up code needs to decrement\nthe counter\n\n
\n    /* cleanup code */\n    Py_XDECREF(py_a);\n    
\n\nIt's important to understand that file conversion only works on actual files --\ni.e. ones created using the open() command in Python. It does\nnot support converting arbitrary objects that support the file interface into\nC FILE* pointers. This can affect many things. For example, in the\ninitial printf() examples, one might be tempted to solve the \nproblem of C and Python IDE's (PythonWin, PyCrust, etc.) writing to different\nstdout and stderr by using fprintf() and passing in \nsys.stdout and sys.stderr. For example, instead of\n\n
\n    >>> weave.inline('printf(\"hello\\\\n\");')\n    
\n \nYou might try:\n\n
\n    >>> buf = sys.stdout\n    >>> weave.inline('fprintf(buf,\"hello\\\\n\");',['buf'])\n    
\n\nThis will work as expected from a standard python interpreter, but in PythonWin,\nthe following occurs:\n\n
\n    >>> buf = sys.stdout\n    >>> weave.inline('fprintf(buf,\"hello\\\\n\");',['buf'])\n    Traceback (most recent call last):\n        File \"\", line 1, in ?\n        File \"C:\\Python21\\weave\\inline_tools.py\", line 315, in inline\n            auto_downcast = auto_downcast,\n        File \"C:\\Python21\\weave\\inline_tools.py\", line 386, in compile_function\n            type_factories = type_factories)\n        File \"C:\\Python21\\weave\\ext_tools.py\", line 197, in __init__\n            auto_downcast, type_factories)\n        File \"C:\\Python21\\weave\\ext_tools.py\", line 390, in assign_variable_types\n            raise TypeError, format_error_msg(errors)\n        TypeError: {'buf': \"Unable to convert variable 'buf' to a C++ type.\"}\n    
\n\nThe traceback tells us that inline() was unable to convert 'buf' to a\nC++ type (If instance conversion was implemented, the error would have occurred at \nruntime instead). Why is this? Let's look at what the buf object \nreally is:\n\n
\n    >>> buf\n    pywin.framework.interact.InteractiveView instance at 00EAD014\n    
\n\nPythonWin has reassigned sys.stdout to a special object that \nimplements the Python file interface. This works great in Python, but since \nthe special object doesn't have a FILE* pointer underlying it, fprintf doesn't \nknow what to do with it (well this will be the problem when instance conversion \nis implemented...).\n\n\n

Callable, Instance, and Module Conversion

\n\nNote: Need to look into how ref counts should be handled. Also,\nInstance and Module conversion are not currently implemented.\n\n\n
\n    >>> def a(): \n        pass\n    >>> inline(\"\",['a'])\n    
\n\nCallable and instance variables are converted to PyObject*. Nothing is done\nto their reference counts.\n\n
\n    /* argument conversion code */\n    PyObject* a = py_to_callable(get_variable(\"a\",raw_locals,raw_globals),\"a\");\n    
\n\nget_variable() reads the variable a\nfrom the local and global namespaces. The py_to_callable() and\npy_to_instance() don't currently increment the ref count.\n\n
    \n    PyObject* py_to_callable(PyObject* py_obj, char* name)\n    {\n        if (!py_obj || !PyCallable_Check(py_obj))\n            handle_bad_type(py_obj,\"callable\", name);    \n        return py_obj;\n    }\n\n    PyObject* py_to_instance(PyObject* py_obj, char* name)\n    {\n        if (!py_obj || !PyFile_Check(py_obj))\n            handle_bad_type(py_obj,\"instance\", name);    \n        return py_obj;\n    }\n    
\n \nThere is no cleanup code for callables, modules, or instances.\n\n\n

Customizing Conversions

\n

\nConverting from Python to C++ types is handled by xxx_specification classes. A \ntype specification class actually serves in two related but different \nroles. The first is in determining whether a Python variable that needs to be \nconverted should be represented by the given class. The second is as a code \ngenerator that generates C++ code needed to convert from Python to C++ types for \na specific variable.\n

\nWhen \n\n

\n    >>> a = 1\n    >>> weave.inline('printf(\"%d\",a);',['a'])\n    
\n \nis called for the first time, the code snippet has to be compiled. In this \nprocess, the variable 'a' is tested against a list of type specifications (the \ndefault list is stored in weave/ext_tools.py). The first \nspecification in the list is used to represent the variable. \n\n

\nExamples of xxx_specification are scattered throughout numerous \n\"xxx_spec.py\" files in the weave package. Closely related to \nthe xxx_specification classes are yyy_info classes. \nThese classes contain compiler, header, and support code information necessary \nfor including a certain set of capabilities (such as blitz++ or CXX support)\nin a compiled module. xxx_specification classes have one or more\nyyy_info classes associated with them.\n\nIf you'd like to define your own set of type specifications, the current best route\nis to examine some of the existing spec and info files. Maybe looking over\nsequence_spec.py and cxx_info.py are a good place to start. After defining \nspecification classes, you'll need to pass them into inline using the \ntype_factories argument. \n\nA lot of times you may just want to change how a specific variable type is \nrepresented. Say you'd rather have Python strings converted to \nstd::string or maybe char* instead of using the CXX \nstring object, but would like all other type conversions to have default \nbehavior. This requires that a new specification class that handles strings\nis written and then prepended to a list of the default type specifications. Since\nit is closer to the front of the list, it effectively overrides the default\nstring specification.\n\nThe following code demonstrates how this is done:\n\n...\n\n\n

The Catalog

\n

\ncatalog.py has a class called catalog that helps keep \ntrack of previously compiled functions. This prevents inline() \nand related functions from having to compile functions every time they are \ncalled. Instead, catalog will check an in memory cache to see if the function \nhas already been loaded into python. If it hasn't, then it starts searching \nthrough persistent catalogs on disk to see if it finds an entry for the given \nfunction. By saving information about compiled functions to disk, it isn't\nnecessary to re-compile functions every time you stop and restart the interpreter.\nFunctions are compiled once and stored for future use.\n\n

\nWhen inline(cpp_code) is called the following things happen:\n

    \n
  1. \n A fast local cache of functions is checked for the last function called for \n cpp_code. If an entry for cpp_code doesn't exist in the \n cache or the cached function call fails (perhaps because the function doesn't \n have compatible types) then the next step is to check the catalog. \n
  2. \n The catalog class also keeps an in-memory cache with a list of all the \n functions compiled for cpp_code. If cpp_code has\n ever been called, then this cache will be present (loaded from disk). If\n the cache isn't present, then it is loaded from disk.\n

    \n If the cache is present, each function in the cache is \n called until one is found that was compiled for the correct argument types. If \n none of the functions work, a new function is compiled with the given argument \n types. This function is written to the on-disk catalog as well as into the \n in-memory cache.

    \n
  3. \n When a lookup for cpp_code fails, the catalog looks through \n the on-disk function catalogs for the entries. The PYTHONCOMPILED variable \n determines where to search for these catalogs and in what order. If \n PYTHONCOMPILED is not present several platform dependent locations are \n searched. All functions found for cpp_code in the path are \n loaded into the in-memory cache with functions found earlier in the search \n path closer to the front of the call list.\n

    \n If the function isn't found in the on-disk catalog, \n then the function is compiled, written to the first writable directory in the \n PYTHONCOMPILED path, and also loaded into the in-memory cache.

    \n
  4. \n
\n\n\n

Function Storage: How functions are stored in caches and on disk

\n

\nFunction caches are stored as dictionaries where the key is the entire C++\ncode string and the value is either a single function (as in the \"level 1\"\ncache) or a list of functions (as in the main catalog cache). On disk\ncatalogs are stored in the same manner using standard Python shelves.\n

\nEarly on, there was a question as to whether md5 check sums of the C++\ncode strings should be used instead of the actual code strings. I think this\nis the route inline Perl took. Some (admittedly quick) tests of the md5 vs.\nthe entire string showed that using the entire string was at least a\nfactor of 3 or 4 faster for Python. I think this is because it is more\ntime consuming to compute the md5 value than it is to do look-ups of long\nstrings in the dictionary. Look at the examples/md5_speed.py file for the\ntest run. \n\n\n

Catalog search paths and the PYTHONCOMPILED variable

\n

\nThe default location for catalog files on Unix is ~/.pythonXX_compiled where \nXX is the version of Python being used. If this directory doesn't exist, it is \ncreated the first time a catalog is used. The directory must be writable. If, \nfor any reason it isn't, then the catalog attempts to create a directory based \non your user id in the /tmp directory. The directory permissions are set so \nthat only you have access to the directory. If this fails, I think you're out of \nluck. I don't think either of these should ever fail though. On Windows, a \ndirectory called pythonXX_compiled is created in the user's temporary \ndirectory. \n

\nThe actual catalog file that lives in this directory is a Python shelve with\na platform specific name such as \"nt21compiled_catalog\" so that multiple OSes\ncan share the same file systems without trampling on each other. Along with\nthe catalog file, the .cpp and .so or .pyd files created by inline will live\nin this directory. The catalog file simply contains keys which are the C++\ncode strings with values that are lists of functions. The function lists point\nat functions within these compiled modules. Each function in the lists \nexecutes the same C++ code string, but compiled for different input variables.\n

\nYou can use the PYTHONCOMPILED environment variable to specify alternative\nlocations for compiled functions. On Unix this is a colon (':') separated\nlist of directories. On Windows, it is a (';') separated list of directories.\nThese directories will be searched prior to the default directory for a\ncompiled function catalog. Also, the first writable directory in the list\nis where all new compiled function catalogs, .cpp and .so or .pyd files are\nwritten. Relative directory paths ('.' and '..') should work fine in the\nPYTHONCOMPILED variable as should environment variables.\n

\nThere is a \"special\" path variable called MODULE that can be placed in the \nPYTHONCOMPILED variable. It specifies that the compiled catalog should\nreside in the same directory as the module that called it. This is useful\nif an admin wants to build a lot of compiled functions during the build\nof a package and then install them in site-packages along with the package.\nUser's who specify MODULE in their PYTHONCOMPILED variable will have access\nto these compiled functions. Note, however, that if they call the function\nwith a set of argument types that it hasn't previously been built for, the\nnew function will be stored in their default directory (or some other writable\ndirectory in the PYTHONCOMPILED path) because the user will not have write\naccess to the site-packages directory.\n

\nAn example of using the PYTHONCOMPILED path on bash follows:\n\n

\n    PYTHONCOMPILED=MODULE:/some/path;export PYTHONCOMPILED;\n    
\n\nIf you are using python21 on linux, and the module bob.py in site-packages\nhas a compiled function in it, then the catalog search order when calling that\nfunction for the first time in a python session would be:\n\n
\n    /usr/lib/python21/site-packages/linuxpython_compiled\n    /some/path/linuxpython_compiled\n    ~/.python21_compiled/linuxpython_compiled\n    
\n\nThe default location is always included in the search path.\n

\n \nNote: hmmm. see a possible problem here. I should probably make a sub-\ndirectory such as /usr/lib/python21/site-\npackages/python21_compiled/linuxpython_compiled so that library files compiled \nwith python21 are tried to link with python22 files in some strange scenarios. \nNeed to check this.\n\n\n

\nThe in-module cache (in weave.inline_tools) reduces the overhead \nof calling inline functions by about a factor of 2. It can be reduced a little \nmore for type loop calls where the same function is called over and over again \nif the cache was a single value instead of a dictionary, but the benefit is \nvery small (less than 5%) and the utility is quite a bit less. So, we'll stick \nwith a dictionary as the cache.\n

\n\n\n

Blitz

\n Note: most of this section is lifted from old documentation. It should be\npretty accurate, but there may be a few discrepancies.\n

\nweave.blitz() compiles Numeric Python expressions for fast \nexecution. For most applications, compiled expressions should provide a \nfactor of 2-10 speed-up over Numeric arrays. Using compiled \nexpressions is meant to be as unobtrusive as possible and works much like \nPython's exec statement. As an example, the following code fragment takes a 5 \npoint average of the 512x512 2d image, b, and stores it in array, a:\n\n

\n    from scipy import *  # or from Numeric import *\n    a = ones((512,512), Float64) \n    b = ones((512,512), Float64) \n    # ...do some stuff to fill in b...\n    # now average\n    a[1:-1,1:-1] =  (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1] \\\n                   + b[1:-1,2:] + b[1:-1,:-2]) / 5.\n    
\n \nTo compile the expression, convert the expression to a string by putting\nquotes around it and then use weave.blitz:\n\n
\n    import weave\n    expr = \"a[1:-1,1:-1] =  (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1]\" \\\n                          \"+ b[1:-1,2:] + b[1:-1,:-2]) / 5.\"\n    weave.blitz(expr)\n    
\n\nThe first time weave.blitz is run for a given expression and \nset of arguments, C++ code that accomplishes the exact same task as the Python \nexpression is generated and compiled to an extension module. This can take up \nto a couple of minutes depending on the complexity of the function. Subsequent \ncalls to the function are very fast. Further, the generated module is saved \nbetween program executions so that the compilation is only done once for a \ngiven expression and associated set of array types. If the given expression\nis executed with a new set of array types, the code must be compiled again. This\ndoes not overwrite the previously compiled function -- both of them are saved and\navailable for execution. \n

\nThe following table compares the run times for standard Numeric code and \ncompiled code for the 5 point averaging.\n

\n

\n\n\n\n\n\n
Method Run Time (seconds)
Standard Numeric 0.46349
blitz (1st time compiling) 78.95526
blitz (subsequent calls) 0.05843 (factor of 8 speedup)
\n
\n

\nThese numbers are for a 512x512 double precision image run on a 400 MHz Celeron \nprocessor under RedHat Linux 6.2.\n

\nBecause of the slow compile times, its probably most effective to develop \nalgorithms as you usually do using the capabilities of scipy or the Numeric \nmodule. Once the algorithm is perfected, put quotes around it and execute it \nusing weave.blitz. This provides the standard rapid \nprototyping strengths of Python and results in algorithms that run close to \nthat of hand coded C or Fortran.\n\n\n

Requirements

\n\nCurrently, the weave.blitz has only been tested under Linux \nwith gcc-2.95-3 and on Windows with Mingw32 (2.95.2). Its compiler \nrequirements are pretty heavy duty (see the \nblitz++ home page), so it won't \nwork with just any compiler. Particularly MSVC++ isn't up to snuff. A number \nof other compilers such as KAI++ will also work, but my suspicions are that gcc \nwill get the most use.\n\n\n

Limitations

\n
    \n
  1. \nCurrently, weave.blitz handles all standard mathematical \noperators except for the ** power operator. The built-in trigonometric, log, \nfloor/ceil, and fabs functions might work (but haven't been tested). It also \nhandles all types of array indexing supported by the Numeric module. \n

    \nweave.blitz does not currently support operations that use \narray broadcasting, nor have any of the special purpose functions in Numeric \nsuch as take, compress, etc. been implemented. Note that there are no obvious \nreasons why most of this functionality cannot be added to scipy.weave, so it \nwill likely trickle into future versions. Using slice() objects \ndirectly instead of start:stop:step is also not supported.\n

  2. \n
  3. \nCurrently Python only works on expressions that include assignment such as\n \n
    \n    >>> result = b + c + d\n    
    \n\nThis means that the result array must exist before calling \nweave.blitz. Future versions will allow the following:\n\n
    \n    >>> result = weave.blitz_eval(\"b + c + d\")\n    
    \n
  4. \n
  5. \nweave.blitz works best when algorithms can be expressed in a \n\"vectorized\" form. Algorithms that have a large number of if/thens and other \nconditions are better hand written in C or Fortran. Further, the restrictions \nimposed by requiring vectorized expressions sometimes preclude the use of more \nefficient data structures or algorithms. For maximum speed in these cases, \nhand-coded C or Fortran code is the only way to go.\n
  6. \n
  7. \nweave.blitz can produce different results than Numeric in certain \nsituations. It can happen when the array receiving the results of a \ncalculation is also used during the calculation. The Numeric behavior is to \ncarry out the entire calculation on the right hand side of an equation and \nstore it in a temporary array. This temporary array is assigned to the array \non the left hand side of the equation. blitz, on the other hand, does a \n\"running\" calculation of the array elements assigning values from the right hand\nside to the elements on the left hand side immediately after they are calculated.\nHere is an example, provided by Prabhu Ramachandran, where this happens:\n\n
    \n        # 4 point average.\n        >>> expr = \"u[1:-1, 1:-1] = (u[0:-2, 1:-1] + u[2:, 1:-1] + \"\\\n        ...                \"u[1:-1,0:-2] + u[1:-1, 2:])*0.25\"\n        >>> u = zeros((5, 5), 'd'); u[0,:] = 100\n        >>> exec (expr)\n        >>> u\n        array([[ 100.,  100.,  100.,  100.,  100.],\n               [   0.,   25.,   25.,   25.,    0.],\n               [   0.,    0.,    0.,    0.,    0.],\n               [   0.,    0.,    0.,    0.,    0.],\n               [   0.,    0.,    0.,    0.,    0.]])\n        \n        >>> u = zeros((5, 5), 'd'); u[0,:] = 100\n        >>> weave.blitz (expr)\n        >>> u\n        array([[ 100.  ,  100.       ,  100.       ,  100.       ,  100. ],\n               [   0.  ,   25.       ,   31.25     ,   32.8125   ,    0. ],\n               [   0.  ,    6.25     ,    9.375    ,   10.546875 ,    0. ],\n               [   0.  ,    1.5625   ,    2.734375 ,    3.3203125,    0. ],\n               [   0.  ,    0.       ,    0.       ,    0.       ,    0. ]])    \n        
    \n \n You can prevent this behavior by using a temporary array.\n \n
    \n        >>> u = zeros((5, 5), 'd'); u[0,:] = 100\n        >>> temp = zeros((4, 4), 'd');\n        >>> expr = \"temp = (u[0:-2, 1:-1] + u[2:, 1:-1] + \"\\\n        ...        \"u[1:-1,0:-2] + u[1:-1, 2:])*0.25;\"\\\n        ...        \"u[1:-1,1:-1] = temp\"\n        >>> weave.blitz (expr)\n        >>> u\n        array([[ 100.,  100.,  100.,  100.,  100.],\n               [   0.,   25.,   25.,   25.,    0.],\n               [   0.,    0.,    0.,    0.,    0.],\n               [   0.,    0.,    0.,    0.,    0.],\n               [   0.,    0.,    0.,    0.,    0.]])\n        
    \n \n
  8. \n
  9. \nOne other point deserves mention lest people be confused. \nweave.blitz is not a general purpose Python->C compiler. It \nonly works for expressions that contain Numeric arrays and/or \nPython scalar values. This focused scope concentrates effort on the \ncompuationally intensive regions of the program and sidesteps the difficult \nissues associated with a general purpose Python->C compiler.\n
  10. \n
\n\n\n

Numeric efficiency issues: What compilation buys you

\n\nSome might wonder why compiling Numeric expressions to C++ is beneficial since \noperations on Numeric arrays are already executed within C loops. \nThe problem is that anything other than the simplest expressions are executed in \nless than optimal fashion. Consider the following Numeric expression:\n\n
\n    a = 1.2 * b + c * d\n    
\n \nWhen Numeric calculates the value for the 2d array, a, it does \nthe following steps:\n\n
\n    temp1 = 1.2 * b\n    temp2 = c * d\n    a = temp1 + temp2\n    
\n \nTwo things to note. Since c is an (perhaps large) array, a large \ntemporary array must be created to store the results of 1.2 * b. \nThe same is true for temp2. Allocation is slow. The second thing \nis that we have 3 loops executing, one to calculate temp1, one for \ntemp2 and one for adding them up. A C loop for the same problem \nmight look like:\n\n
\n    for(int i = 0; i < M; i++)\n        for(int j = 0; j < N; j++)\n            a[i,j] = 1.2 * b[i,j] + c[i,j] * d[i,j]\n    
\n \nHere, the 3 loops have been fused into a single loop and there is no longer\na need for a temporary array. This provides a significant speed improvement\nover the above example (write me and tell me what you get). \n

\nSo, converting Numeric expressions into C/C++ loops that fuse the loops and \neliminate temporary arrays can provide big gains. The goal then, is to convert \nNumeric expressions to C/C++ loops, compile them in an extension module, and \nthen call the compiled extension function. The good news is that there is an \nobvious correspondence between the Numeric expression above and the C loop. The \nbad news is that Numeric is generally much more powerful than this simple \nexample illustrates and handling all possible indexing possibilities results in \nloops that are less than straightforward to write. (take a peek in Numeric for \nconfirmation). Luckily, there are several available tools that simplify the \nprocess.\n\n\n

The Tools

\n\nweave.blitz relies heavily on several remarkable tools. On the \nPython side, the main facilitators are Jeremy Hylton's parser module and Jim \nHugunin's Numeric module. On the compiled language side, Todd Veldhuizen's \nblitz++ array library, written in C++ (shhhh. don't tell David Beazley), does \nthe heavy lifting. Don't assume that, because it's C++, it's much slower than C \nor Fortran. Blitz++ uses a jaw dropping array of template techniques \n(metaprogramming, template expression, etc) to convert innocent looking and \nreadable C++ expressions into code that usually executes within a few \npercentage points of Fortran code for the same problem. This is good. \nUnfortunately all the template raz-ma-taz is very expensive to compile, so the \n200 line extension modules often take 2 or more minutes to compile. This isn't so \ngood. weave.blitz works to minimize this issue by remembering \nwhere compiled modules live and reusing them instead of re-compiling every time \na program is re-run.\n\n\n

Parser

\nTearing Numeric expressions apart, examining the pieces, and then rebuilding \nthem as C++ (blitz) expressions requires a parser of some sort. I can imagine \nsomeone attacking this problem with regular expressions, but it'd likely be \nugly and fragile. Amazingly, Python solves this problem for us. It actually \nexposes its parsing engine to the world through the parser module. \nThe following fragment creates an Abstract Syntax Tree (AST) object for the \nexpression and then converts to a (rather unpleasant looking) deeply nested list \nrepresentation of the tree. \n \n
\n    >>> import parser\n    >>> import scipy.weave.misc\n    >>> ast = parser.suite(\"a = b * c + d\")\n    >>> ast_list = ast.tolist()\n    >>> sym_list = scipy.weave.misc.translate_symbols(ast_list)\n    >>> pprint.pprint(sym_list)\n    ['file_input',\n     ['stmt',\n      ['simple_stmt',\n       ['small_stmt',\n        ['expr_stmt',\n         ['testlist',\n          ['test',\n           ['and_test',\n            ['not_test',\n             ['comparison',\n              ['expr',\n               ['xor_expr',\n                ['and_expr',\n                 ['shift_expr',\n                  ['arith_expr',\n                   ['term',\n                    ['factor', ['power', ['atom', ['NAME', 'a']]]]]]]]]]]]]]],\n         ['EQUAL', '='],\n         ['testlist',\n          ['test',\n           ['and_test',\n            ['not_test',\n             ['comparison',\n              ['expr',\n               ['xor_expr',\n                ['and_expr',\n                 ['shift_expr',\n                  ['arith_expr',\n                   ['term',\n                    ['factor', ['power', ['atom', ['NAME', 'b']]]],\n                    ['STAR', '*'],\n                    ['factor', ['power', ['atom', ['NAME', 'c']]]]],\n                   ['PLUS', '+'],\n                   ['term',\n                    ['factor', ['power', ['atom', ['NAME', 'd']]]]]]]]]]]]]]]]],\n       ['NEWLINE', '']]],\n     ['ENDMARKER', '']]\n    
\n\nDespite its looks, with some tools developed by Jeremy H., it's possible\nto search these trees for specific patterns (sub-trees), extract the \nsub-tree, manipulate them converting python specific code fragments\nto blitz code fragments, and then re-insert it in the parse tree. The parser\nmodule documentation has some details on how to do this. Traversing the \nnew blitzified tree, writing out the terminal symbols as you go, creates\nour new blitz++ expression string.\n\n \n

Blitz and Numeric

\nThe other nice discovery in the project is that the data structure used\nfor Numeric arrays and blitz arrays is nearly identical. Numeric stores\n\"strides\" as byte offsets and blitz stores them as element offsets, but\nother than that, they are the same. Further, most of the concept and\ncapabilities of the two libraries are remarkably similar. It is satisfying \nthat two completely different implementations solved the problem with \nsimilar basic architectures. It is also fortuitous. The work involved in \nconverting Numeric expressions to blitz expressions was greatly diminished.\nAs an example, consider the code for slicing an array in Python with a\nstride:\n\n
\n    >>> a = b[0:4:2] + c\n    >>> a\n    [0,2,4]\n    
\n\n\nIn Blitz it is as follows:\n\n
\n    Array<2,int> b(10);\n    Array<2,int> c(3);\n    // ...\n    Array<2,int> a = b(Range(0,3,2)) + c;\n    
\n\n\nHere the range object works exactly like Python slice objects with the exception\nthat the top index (3) is inclusive whereas Python's (4) is exclusive. Other \ndifferences include the type declarations in C++ and parentheses instead of \nbrackets for indexing arrays. Currently, weave.blitz handles the \ninclusive/exclusive issue by subtracting one from upper indices during the\ntranslation. An alternative that is likely more robust/maintainable in the \nlong run, is to write a PyRange class that behaves like Python's range. \nThis is likely very easy.\n

\nThe stock blitz also doesn't handle negative indices in ranges. The current \nimplementation of the blitz() has a partial solution to this \nproblem. It calculates an index that starts with a '-' sign by subtracting it \nfrom the maximum index in the array so that:\n\n

\n                    upper index limit\n                        /-----\\\n    b[:-1] -> b(Range(0,Nb[0]-1-1))\n    
\n\nThis approach fails, however, when the top index is calculated from other \nvalues. In the following scenario, if i+j evaluates to a negative \nvalue, the compiled code will produce incorrect results and could even core-\ndump. Right now, all calculated indices are assumed to be positive.\n \n
\n    b[:i-j] -> b(Range(0,i+j))\n    
\n\nA solution is to calculate all indices up front using if/then to handle the\n+/- cases. This is a little work and results in more code, so it hasn't been\ndone. I'm holding out to see if blitz++ can be modified to handle negative\nindexing, but haven't looked into how much effort is involved yet. While it \nneeds fixin', I don't think there is a ton of code where this is an issue.\n

\nThe actual translation of the Python expressions to blitz expressions is \ncurrently a two part process. First, all x:y:z slicing expressions are removed\nfrom the AST, converted to slice(x,y,z) and re-inserted into the tree. Any\nmath needed on these expressions (subtracting from the \nmaximum index, etc.) is also performed here. _beg and _end are used as special\nvariables that are defined as blitz::fromBegin and blitz::toEnd.\n\n

\n    a[i+j:i+j+1,:] = b[2:3,:] \n    
\n\nbecomes a more verbose:\n \n
\n    a[slice(i+j,i+j+1),slice(_beg,_end)] = b[slice(2,3),slice(_beg,_end)]\n    
\n \nThe second part does a simple string search/replace to convert to a blitz \nexpression with the following translations:\n\n
\n    slice(_beg,_end) -> _all  # not strictly needed, but cuts down on code.\n    slice            -> blitz::Range\n    [                -> (\n    ]                -> )\n    _stp             -> 1\n    
\n\n_all is defined in the compiled function as \nblitz::Range.all(). These translations could of course happen \ndirectly in the syntax tree. But the string replacement is slightly easier. \nNote that name spaces are maintained in the C++ code to lessen the likelihood \nof name clashes. Currently no effort is made to detect name clashes. A good \nrule of thumb is don't use values that start with '_' or 'py_' in compiled \nexpressions and you'll be fine.\n\n \n

Type definitions and coercion

\n\nSo far we've glossed over the dynamic vs. static typing issue between Python \nand C++. In Python, the type of value that a variable holds can change\nthrough the course of program execution. C/C++, on the other hand, forces you\nto declare the type of value a variables will hold prior at compile time.\nweave.blitz handles this issue by examining the types of the\nvariables in the expression being executed, and compiling a function for those\nexplicit types. For example:\n\n
\n    a = ones((5,5),Float32)\n    b = ones((5,5),Float32)\n    weave.blitz(\"a = a + b\")\n    
\n\nWhen compiling this expression to C++, weave.blitz sees that the\nvalues for a and b in the local scope have type Float32, or 'float'\non a 32 bit architecture. As a result, it compiles the function using \nthe float type (no attempt has been made to deal with 64 bit issues).\nIt also goes one step further. If all arrays have the same type, a templated\nversion of the function is made and instantiated for float, double, \ncomplex, and complex arrays. Note: This feature has been \nremoved from the current version of the code. Each version will be compiled\nseparately \n

\nWhat happens if you call a compiled function with array types that are \ndifferent than the ones for which it was originally compiled? No biggie, you'll \njust have to wait on it to compile a new version for your new types. This \ndoesn't overwrite the old functions, as they are still accessible. See the \ncatalog section in the inline() documentation to see how this is handled. \nSuffice to say, the mechanism is transparent to the user and behaves \nlike dynamic typing with the occasional wait for compiling newly typed \nfunctions.\n

\nWhen working with combined scalar/array operations, the type of the array is \nalways used. This is similar to the savespace flag that was recently \nadded to Numeric. This prevents issues with the following expression perhaps \nunexpectedly being calculated at a higher (more expensive) precision that can \noccur in Python:\n\n

\n    >>> a = array((1,2,3),typecode = Float32)\n    >>> b = a * 2.1 # results in b being a Float64 array.\n    
\n \nIn this example, \n\n
\n    >>> a = ones((5,5),Float32)\n    >>> b = ones((5,5),Float32)\n    >>> weave.blitz(\"b = a * 2.1\")\n    
\n \nthe 2.1 is cast down to a float before carrying out \nthe operation. If you really want to force the calculation to be a \ndouble, define a and b as \ndouble arrays.\n

\nOne other point of note. Currently, you must include both the right hand side \nand left hand side (assignment side) of your equation in the compiled \nexpression. Also, the array being assigned to must be created prior to calling \nweave.blitz. I'm pretty sure this is easily changed so that a \ncompiled_eval expression can be defined, but no effort has been made to \nallocate new arrays (and discern their type) on the fly.\n\n \n

Cataloging Compiled Functions

\n\nSee the Cataloging functions section in the \nweave.inline() documentation.\n\n \n

Checking Array Sizes

\n\nSurprisingly, one of the big initial problems with compiled code was making\nsure all the arrays in an operation were of compatible type. The following\ncase is trivially easy:\n\n
\n    a = b + c\n    
\n \nIt only requires that arrays a, b, and c \nhave the same shape. However, expressions like:\n\n
\n    a[i+j:i+j+1,:] = b[2:3,:] + c\n    
\n\nare not so trivial. Since slicing is involved, the size of the slices, not the \ninput arrays must be checked. Broadcasting complicates things further because \narrays and slices with different dimensions and shapes may be compatible for \nmath operations (broadcasting isn't yet supported by \nweave.blitz). Reductions have a similar effect as their \nresults are different shapes than their input operand. The binary operators in \nNumeric compare the shapes of their two operands just before they operate on \nthem. This is possible because Numeric treats each operation independently. \nThe intermediate (temporary) arrays created during sub-operations in an \nexpression are tested for the correct shape before they are combined by another \noperation. Because weave.blitz fuses all operations into a \nsingle loop, this isn't possible. The shape comparisons must be done and \nguaranteed compatible before evaluating the expression.\n

\nThe solution chosen converts input arrays to \"dummy arrays\" that only represent \nthe dimensions of the arrays, not the data. Binary operations on dummy arrays \ncheck that input array sizes are compatible and return a dummy array with the \ncorrect size. Evaluating an expression of dummy arrays traces the \nchanging array sizes through all operations and fails if incompatible array \nsizes are ever found. \n

\nThe machinery for this is housed in weave.size_check. It \nbasically involves writing a new class (dummy array) and overloading its math \noperators to calculate the new sizes correctly. All the code is in Python and \nthere is a fair amount of logic (mainly to handle indexing and slicing) so the \noperation does impose some overhead. For large arrays (ie. 50x50x50), the \noverhead is negligible compared to evaluating the actual expression. For small \narrays (ie. 16x16), the overhead imposed for checking the shapes with this \nmethod can cause the weave.blitz to be slower than evaluating \nthe expression in Python. \n

\nWhat can be done to reduce the overhead? (1) The size checking code could be \nmoved into C. This would likely remove most of the overhead penalty compared \nto Numeric (although there is also some calling overhead), but no effort has \nbeen made to do this. (2) You can also call weave.blitz with\ncheck_size=0 and the size checking isn't done. However, if the \nsizes aren't compatible, it can cause a core-dump. So, foregoing size_checking\nisn't advisable until your code is well debugged.\n\n \n

Creating the Extension Module

\n\nweave.blitz uses the same machinery as \nweave.inline to build the extension module. The only difference\nis the code included in the function is automatically generated from the\nNumeric array expression instead of supplied by the user.\n\n\n

Extension Modules

\nweave.inline and weave.blitz are high level tools\nthat generate extension modules automatically. Under the covers, they use several\nclasses from weave.ext_tools to help generate the extension module.\nThe main two classes are ext_module and ext_function (I'd\nlike to add ext_class and ext_method also). These classes\nsimplify the process of generating extension modules by handling most of the \"boiler\nplate\" code automatically.\n\n\nNote: inline actually sub-classes weave.ext_tools.ext_function \nto generate slightly different code than the standard ext_function.\nThe main difference is that the standard class converts function arguments to\nC types, while inline always has two arguments, the local and global dicts, and\nthe grabs the variables that need to be convereted to C from these.\n\n\n\n

A Simple Example

\nThe following simple example demonstrates how to build an extension module within\na Python function:\n\n
\n    # examples/increment_example.py\n    from weave import ext_tools\n    \n    def build_increment_ext():\n        \"\"\" Build a simple extension with functions that increment numbers.\n            The extension will be built in the local directory.\n        \"\"\"        \n        mod = ext_tools.ext_module('increment_ext')\n    \n        a = 1 # effectively a type declaration for 'a' in the \n              # following functions.\n    \n        ext_code = \"return_val = Py::new_reference_to(Py::Int(a+1));\"    \n        func = ext_tools.ext_function('increment',ext_code,['a'])\n        mod.add_function(func)\n        \n        ext_code = \"return_val = Py::new_reference_to(Py::Int(a+2));\"    \n        func = ext_tools.ext_function('increment_by_2',ext_code,['a'])\n        mod.add_function(func)\n                \n        mod.compile()\n    
\n\n\nThe function build_increment_ext() creates an extension module \nnamed increment_ext and compiles it to a shared library (.so or \n.pyd) that can be loaded into Python.. increment_ext contains two \nfunctions, increment and increment_by_2. \n\nThe first line of build_increment_ext(),\n\n
\n        mod = ext_tools.ext_module('increment_ext') \n    
\n\ncreates an ext_module instance that is ready to have \next_function instances added to it. ext_function \ninstances are created much with a calling convention similar to \nweave.inline(). The most common call includes a C/C++ code \nsnippet and a list of the arguments for the function. The following\n\n
\n        ext_code = \"return_val = Py::new_reference_to(Py::Int(a+1));\"    \n        func = ext_tools.ext_function('increment',ext_code,['a'])\n    
\n \ncreates a C/C++ extension function that is equivalent to the following Python\nfunction:\n\n
\n        def increment(a):\n            return a + 1\n    
\n\nA second method is also added to the module and then,\n\n
\n        mod.compile()\n    
\n\nis called to build the extension module. By default, the module is created\nin the current working directory.\n\nThis example is available in the examples/increment_example.py file\nfound in the weave directory. At the bottom of the file in the\nmodule's \"main\" program, an attempt to import increment_ext without\nbuilding it is made. If this fails (the module doesn't exist in the PYTHONPATH), \nthe module is built by calling build_increment_ext(). This approach\nonly takes the time consuming ( a few seconds for this example) process of building\nthe module if it hasn't been built before.\n\n
\n    if __name__ == \"__main__\":\n        try:\n            import increment_ext\n        except ImportError:\n            build_increment_ext()\n            import increment_ext\n        a = 1\n        print 'a, a+1:', a, increment_ext.increment(a)\n        print 'a, a+2:', a, increment_ext.increment_by_2(a)           \n    
\n\n\nNote: If we were willing to always pay the penalty of building the C++ code for \na module, we could store the md5 checksum of the C++ code along with some \ninformation about the compiler, platform, etc. Then, \next_module.compile() could try importing the module before it actually\ncompiles it, check the md5 checksum and other meta-data in the imported module\nwith the meta-data of the code it just produced and only compile the code if\nthe module didn't exist or the meta-data didn't match. This would reduce the\nabove code to:\n\n
\n    if __name__ == \"__main__\":\n        build_increment_ext()\n\n        a = 1\n        print 'a, a+1:', a, increment_ext.increment(a)\n        print 'a, a+2:', a, increment_ext.increment_by_2(a)           \n    
\n\nNote: There would always be the overhead of building the C++ code, but it would only actually compile the code once. You pay a little in overhead and get cleaner\n\"import\" code. Needs some thought.\n\n

\n\nIf you run increment_example.py from the command line, you get\nthe following:\n\n

\n    [eric@n0]$ python increment_example.py\n    a, a+1: 1 2\n    a, a+2: 1 3\n    
\n\nIf the module didn't exist before it was run, the module is created. If it did\nexist, it is just imported and used.\n\n\n

Fibonacci Example

\nexamples/fibonacci.py provides a little more complex example of \nhow to use ext_tools. Fibonacci numbers are a series of numbers \nwhere each number in the series is the sum of the previous two: 1, 1, 2, 3, 5, \n8, etc. Here, the first two numbers in the series are taken to be 1. One \napproach to calculating Fibonacci numbers uses recursive function calls. In \nPython, it might be written as:\n\n
\n    def fib(a):\n        if a <= 2:\n            return 1\n        else:\n            return fib(a-2) + fib(a-1)\n    
\n\nIn C, the same function would look something like this:\n\n
\n     int fib(int a)\n     {                   \n         if(a <= 2)\n             return 1;\n         else\n             return fib(a-2) + fib(a-1);  \n     }                      \n    
\n\nRecursion is much faster in C than in Python, so it would be beneficial\nto use the C version for fibonacci number calculations instead of the\nPython version. We need an extension function that calls this C function\nto do this. This is possible by including the above code snippet as \n\"support code\" and then calling it from the extension function. Support \ncode snippets (usually structure definitions, helper functions and the like)\nare inserted into the extension module C/C++ file before the extension\nfunction code. Here is how to build the C version of the fibonacci number\ngenerator:\n\n
\ndef build_fibonacci():\n    \"\"\" Builds an extension module with fibonacci calculators.\n    \"\"\"\n    mod = ext_tools.ext_module('fibonacci_ext')\n    a = 1 # this is effectively a type declaration\n    \n    # recursive fibonacci in C \n    fib_code = \"\"\"\n                   int fib1(int a)\n                   {                   \n                       if(a <= 2)\n                           return 1;\n                       else\n                           return fib1(a-2) + fib1(a-1);  \n                   }                         \n               \"\"\"\n    ext_code = \"\"\"\n                   int val = fib1(a);\n                   return_val = Py::new_reference_to(Py::Int(val));\n               \"\"\"    \n    fib = ext_tools.ext_function('fib',ext_code,['a'])\n    fib.customize.add_support_code(fib_code)\n    mod.add_function(fib)\n\n    mod.compile()\n\n    
\n\nXXX More about custom_info, and what xxx_info instances are good for.\n\n

\n\nNote: recursion is not the fastest way to calculate fibonacci numbers, but this \napproach serves nicely for this example.\n\n

\n\n

Customizing Type Conversions -- Type Factories

\nnot written\n\n

Things I wish weave did

\n\nIt is possible to get name clashes if you uses a variable name that is already defined\nin a header automatically included (such as stdio.h) For instance, if you\ntry to pass in a variable named stdout, you'll get a cryptic error report\ndue to the fact that stdio.h also defines the name. weave\nshould probably try and handle this in some way.\n\nOther things...\n", "source_code_before": "\n

Weave Documentation

\n

\nBy Eric Jones eric@enthought.com\n

\n

Outline

\n
\n
Introduction\n
Requirements\n
Installation\n
Testing\n
Benchmarks\n
Inline\n
\n
More with printf\n
\n More examples\n
\n
Binary search\n
Dictionary sort\n
Numeric -- cast/copy/transpose\n
wxPython
\n
\n
Keyword options\n
Returning values\n
\n
\n The issue with locals()
\n
\n
A quick look at the code\n
\n Technical Details\n
\n
Converting Types\n
\n
\n Numeric Argument Conversion\n
\n String, List, Tuple, and Dictionary Conversion\n
File Conversion \n
\n Callable, Instance, and Module Conversion \n
Customizing Conversions\n
\n
Compiling Code\n
\"Cataloging\" functions\n
\n
Function Storage\n
The PYTHONCOMPILED evnironment variable
\n
\n
\n
\n
\n
\n
Blitz\n
\n
Requirements\n
Limitations\n
Numeric Efficiency Issues\n
The Tools \n
\n
Parser\n
Blitz and Numeric\n
\n
Type defintions and coersion\n
Cataloging Compiled Functions\n
Checking Array Sizes\n
Creating the Extension Module\n
\n
Extension Modules\n
\n
A Simple Example\n
Fibonacci Example\n
\n
Customizing Type Conversions -- Type Factories (not written)\n
\n
Type Specifications\n
Type Information\n
The Conversion Process \n
\n
\n\n

Introduction

\n\n

\nThe weave package provides tools for including C/C++ code within\nin Python code. This offers both another level of optimization to those who need \nit, and an easy way to modify and extend any supported extension libraries such \nas wxPython and hopefully VTK soon. Inlining C/C++ code within Python generally\nresults in speed ups of 1.5x to 30x speed-up over algorithms written in pure\nPython (However, it is also possible to slow things down...). Generally \nalgorithms that require a large number of calls to the Python API don't benefit\nas much from the conversion to C/C++ as algorithms that have inner loops \ncompletely convertable to C.\n

\nThere are three basic ways to use weave. The \nweave.inline() function executes C code directly within Python, \nand weave.blitz() translates Python Numeric expressions to C++ \nfor fast execution. blitz() was the original reason \nweave was built. For those interested in building extension\nlibraries, the ext_tools module provides classes for building \nextension modules within Python. \n

\nMost of weave's functionality should work on Windows and Unix, \nalthough some of its functionality requires gcc or a similarly \nmodern C++ compiler that handles templates well. Up to now, most testing has \nbeen done on Windows 2000 with Microsoft's C++ compiler (MSVC) and with gcc \n(mingw32 2.95.2 and 2.95.3-6). All tests also pass on Linux (RH 7.1 \nwith gcc 2.96), and I've had reports that it works on Debian also (thanks \nPearu).\n

\nThe inline and blitz provide new functionality to \nPython (although I've recently learned about the PyInline project which may offer \nsimilar functionality to inline). On the other hand, tools for \nbuilding Python extension modules already exists (SWIG, SIP, pycpp, CXX, and \nothers). As of yet, I'm not sure where weave fits in this \nspectrum. It is closest in flavor to CXX in that it makes creating new C/C++ \nextension modules pretty easy. However, if you're wrapping a gaggle of legacy \nfunctions or classes, SWIG and friends are definitely the better choice. \nweave is set up so that you can customize how Python types are \nconverted to C types in weave. This is great for \ninline(), but, for wrapping legacy code, it is more flexible to \nspecify things the other way around -- that is how C types map to Python types. \nThis weave does not do. I guess it would be possible to build \nsuch a tool on top of weave, but with good tools like SWIG around, \nI'm not sure the effort produces any new capabilities. Things like function \noverloading are probably easily implemented in weave and it might \nbe easier to mix Python/C code in function calls, but nothing beyond this comes \nto mind. So, if you're developing new extension modules or optimizing Python \nfunctions in C, weave.ext_tools() might be the tool \nfor you. If you're wrapping legacy code, stick with SWIG.\n

\nThe next several sections give the basics of how to use weave.\nWe'll discuss what's happening under the covers in more detail later \non. Serious users will need to at least look at the type conversion section to \nunderstand how Python variables map to C/C++ types and how to customize this \nbehavior. One other note. If you don't know C or C++ then these docs are \nprobably of very little help to you. Further, it'd be helpful if you know \nsomething about writing Python extensions. weave does quite a \nbit for you, but for anything complex, you'll need to do some conversions, \nreference counting, etc.\n

\n\nNote: weave is actually part of the SciPy package. However, it works fine as a \nstandalone package. The examples here are given as if it is used as a stand \nalone package. If you are using from within scipy, you can use from \nscipy import weave and the examples will work identically.\n\n\n

Requirements

\n
    \n
  • Python\n

    \n I use 2.1.1. Probably 2.0 or higher should work.\n

    \n

  • \n \n
  • C++ compiler\n

    \n weave uses distutils to actually build \n extension modules, so it uses whatever compiler was originally used to \n build Python. weave itself requires a C++ compiler. If \n you used a C++ compiler to build Python, your probably fine.\n

    \n On Unix gcc is the preferred choice because I've done a little \n testing with it. All testing has been done with gcc, but I expect the \n majority of compilers should work for inline and \n ext_tools. The one issue I'm not sure about is that I've \n hard coded things so that compilations are linked with the \n stdc++ library. Is this standard across \n Unix compilers, or is this a gcc-ism?\n

    \n For blitz(), you'll need a reasonably recent version of \n gcc. 2.95.2 works on windows and 2.96 looks fine on Linux. Other \n versions are likely to work. Its likely that KAI's C++ compiler and \n maybe some others will work, but I haven't tried. My advice is to use \n gcc for now unless your willing to tinker with the code some.\n

    \n On Windows, either MSVC or gcc (www.mingw.org\" > mingw32) should work. Again, \n you'll need gcc for blitz() as the\n MSVC compiler doesn't handle templates well.\n

    \n I have not tried Cygwin, so please report success if it works for you.\n

    \n

  • \n\n
  • Numeric (optional)\n

    \n The python Numeric module from here. is required for \n blitz() to work. Be sure and get NumPy, not NumArray\n which is the \"next generation\" implementation. This is not\n required for using inline() or ext_tools.\n

    \n

  • \n
  • scipy_distutils and scipy_test (packaged with weave)\n

    \n These two modules are packaged with weave in both\n the windows installer and the source distributions. If you are using\n CVS, however, you'll need to download these separately (also available\n through CVS at SciPy).\n

    \n

  • \n
\n

\n\n\n

Installation

\n

\nThere are currently two ways to get weave. Fist, \nweave is part of SciPy and installed automatically (as a sub-\npackage) whenever SciPy is installed (although the latest version isn't in \nSciPy yet, so use this one for now). Second, since weave is \nuseful outside of the scientific community, it has been setup so that it can be\nused as a stand-alone module. \n\n

\nThe stand-alone version can be downloaded from here. Unix users should grab the \ntar ball (.tgz file) and install it using the following commands.\n\n

\n    tar -xzvf weave-0.2.tar.gz\n    cd weave-0.2\n    python setup.py install\n    
\n\nThis will also install two other packages, scipy_distutils and \nscipy_test. The first is needed by the setup process itself and \nboth are used in the unit-testing process. Numeric is required if you want to \nuse blitz(), but isn't necessary for inline() or \next_tools\n

\nFor Windows users, it's even easier. You can download the click-install .exe \nfile and run it for automatic installation. There is also a .zip file of the\nsource for those interested. It also includes a setup.py file to simplify\ninstallation. \n

\nIf you're using the CVS version, you'll need to install \nscipy_distutils and scipy_test packages (also \navailable from CVS) on your own.\n

\n \nNote: The dependency issue here is a little sticky. I hate to make people \ndownload more than one file (and so I haven't), but distutils doesn't have a \nway to do conditional installation -- at least that I know about. This can \nlead to undesired clobbering of the scipy_test and scipy_distutils modules. \nWhat to do, what to do... Right now it is a very minor issue.\n\n

\n\n

Testing

\nOnce weave is installed, fire up python and run its unit tests.\n\n
\n    >>> import weave\n    >>> weave.test()\n    runs long time... spews tons of output and a few warnings\n    .\n    .\n    .\n    ..............................................................\n    ................................................................\n    ..................................................\n    ----------------------------------------------------------------------\n    Ran 184 tests in 158.418s\n\n    OK\n    \n    >>> \n    
\n\nThis takes a loooong time. On windows, it is usually several minutes. On Unix \nwith remote file systems, I've had it take 15 or so minutes. In the end, it \nshould run about 180 tests and spew some speed results along the way. If you \nget errors, they'll be reported at the end of the output. Please let me know\nwhat if this occurs.\n\nIf you don't have Numeric installed, you'll get some module import errors \nduring the test setup phase for modules that are Numeric specific (blitz_spec, \nblitz_tools, size_check, standard_array_spec, ast_tools), but all test should\npass (about 100 and they should complete in several minutes).\n

\nIf you only want to test a single module of the package, you can do this by\nrunning test() for that specific module.\n\n

\n    >>> import weave.scalar_spec\n    >>> weave.scalar_spec.test()\n    .......\n    ----------------------------------------------------------------------\n    Ran 7 tests in 23.284s\n    
\n\nTesting Notes:\n
    \n
  • \n Windows 1\n

    \n I've had some test fail on windows machines where I have msvc, gcc-2.95.2 \n (in c:\\gcc-2.95.2), and gcc-2.95.3-6 (in c:\\gcc) all installed. My \n environment has c:\\gcc in the path and does not have c:\\gcc-2.95.2 in the \n path. The test process runs very smoothly until the end where several test \n using gcc fail with cpp0 not found by g++. If I check os.system('gcc -v') \n before running tests, I get gcc-2.95.3-6. If I check after running tests \n (and after failure), I get gcc-2.95.2. ??huh??. The os.environ['PATH'] \n still has c:\\gcc first in it and is not corrupted (msvc/distutils messes \n with the environment variables, so we have to undo its work in some \n places). If anyone else sees this, let me know - - it may just be an quirk \n on my machine (unlikely). Testing with the gcc- 2.95.2 installation always \n works.\n

    \n

  • \n
  • \n Windows 2\n

    \n If you run the tests from PythonWin or some other GUI tool, you'll get a\n ton of DOS windows popping up periodically as weave spawns\n the compiler multiple times. Very annoying. Anyone know how to fix this?\n

    \n

  • \n
  • \n wxPython\n

    \n wxPython tests are not enabled by default because importing wxPython on a \n Unix machine without access to a X-term will cause the program to exit. \n Anyone know of a safe way to detect whether wxPython can be imported and \n whether a display exists on a machine? \n

    \n

  • \n
    \n\n

    \n

\n\n\n

Benchmarks

\nThis section has a few benchmarks -- thats all people want to see anyway right? \nThese are mostly taken from running files in the weave/example \ndirectory and also from the test scripts. Without more information about what \nthe test actually do, their value is limited. Still, their here for the \ncurious. Look at the example scripts for more specifics about what problem was \nactually solved by each run. These examples are run under windows 2000 using \nMicrosoft Visual C++ and python2.1 on a 850 MHz PIII laptop with 320 MB of RAM.\nSpeed up is the improvement (degredation) factor of weave compared to \nconventional Python functions. The blitz() comparisons are shown\ncompared to Numeric.\n

\n

\n\n\n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n
\n

inline and ext_tools

Algorithm

Speed up

binary search   1.50
fibonacci (recursive)  82.10
fibonacci (loop)   9.17
return None   0.14
map   1.20
dictionary sort   2.54
vector quantization  37.40
\n

blitz -- double precision

Algorithm

Speed up

a = b + c 512x512   3.05
a = b + c + d 512x512   4.59
5 pt avg. filter, 2D Image 512x512   9.01
Electromagnetics (FDTD) 100x100x100   8.61
\n
\n

\n\nThe benchmarks shown blitz in the best possible light. Numeric \n(at least on my machine) is significantly worse for double precision than it is \nfor single precision calculations. If your interested in single precision \nresults, you can pretty much divide the double precision speed up by 3 and you'll\nbe close.\n\n\n

Inline

\n

\ninline() compiles and executes C/C++ code on the fly. Variables \nin the local and global Python scope are also available in the C/C++ code. \nValues are passed to the C/C++ code by assignment much like variables \nare passed into a standard Python function. Values are returned from the C/C++ \ncode through a special argument called return_val. Also, the contents of \nmutable objects can be changed within the C/C++ code and the changes remain \nafter the C code exits and returns to Python. (more on this later)\n

\nHere's a trivial printf example using inline():\n\n

\n    >>> import weave    \n    >>> a  = 1\n    >>> weave.inline('printf(\"%d\\\\n\",a);',['a'])\n    1\n    
\n

\nIn this, its most basic form, inline(c_code, var_list) requires two \narguments. c_code is a string of valid C/C++ code. \nvar_list is a list of variable names that are passed from \nPython into C/C++. Here we have a simple printf statement that \nwrites the Python variable a to the screen. The first time you run \nthis, there will be a pause while the code is written to a .cpp file, compiled \ninto an extension module, loaded into Python, cataloged for future use, and \nexecuted. On windows (850 MHz PIII), this takes about 1.5 seconds when using \nMicrosoft's C++ compiler (MSVC) and 6-12 seconds using gcc (mingw32 2.95.2). \nAll subsequent executions of the code will happen very quickly because the code \nonly needs to be compiled once. If you kill and restart the interpreter and then \nexecute the same code fragment again, there will be a much shorter delay in the \nfractions of seconds range. This is because weave stores a \ncatalog of all previously compiled functions in an on disk cache. When it sees \na string that has been compiled, it loads the already compiled module and \nexecutes the appropriate function. \n

\n\nNote: If you try the printf example in a GUI shell such as IDLE, \nPythonWin, PyShell, etc., you're unlikely to see the output. This is because the \nC code is writing to stdout, instead of to the GUI window. This doesn't mean \nthat inline doesn't work in these environments -- it only means that standard \nout in C is not the same as the standard out for Python in these cases. Non \ninput/output functions will work as expected.\n\n

\nAlthough effort has been made to reduce the overhead associated with calling \ninline, it is still less efficient for simple code snippets than using \nequivalent Python code. The simple printf example is actually \nslower by 30% or so than using Python print statement. And, it is \nnot difficult to create code fragments that are 8-10 times slower using inline \nthan equivalent Python. However, for more complicated algorithms, \nthe speed up can be worth while -- anywhwere from 1.5- 30 times faster. \nAlgorithms that have to manipulate Python objects (sorting a list) usually only \nsee a factor of 2 or so improvement. Algorithms that are highly computational \nor manipulate Numeric arrays can see much larger improvements. The \nexamples/vq.py file shows a factor of 30 or more improvement on the vector \nquantization algorithm that is used heavily in information theory and \nclassification problems.\n

\n\n\n

More with printf

\n

\nMSVC users will actually see a bit of compiler output that distutils does not\nsupress the first time the code executes:\n\n

    \n    >>> weave.inline(r'printf(\"%d\\n\",a);',['a'])\n    sc_e013937dbc8c647ac62438874e5795131.cpp\n       Creating library C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\n       \\Release\\sc_e013937dbc8c647ac62438874e5795131.lib and object C:\\DOCUME\n       ~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_e013937dbc8c64\n       7ac62438874e5795131.exp\n    1\n    
\n

\nNothing bad is happening, its just a bit annoying. Anyone know how to \nturn this off? \n

\nThis example also demonstrates using 'raw strings'. The r \npreceeding the code string in the last example denotes that this is a 'raw \nstring'. In raw strings, the backslash character is not interpreted as an \nescape character, and so it isn't necessary to use a double backslash to \nindicate that the '\\n' is meant to be interpreted in the C printf \nstatement instead of by Python. If your C code contains a lot\nof strings and control characters, raw strings might make things easier.\nMost of the time, however, standard strings work just as well.\n\n

\nThe printf statement in these examples is formatted to print \nout integers. What happens if a is a string? inline\nwill happily, compile a new version of the code to accept strings as input,\nand execute the code. The result?\n\n

    \n    >>> a = 'string'\n    >>> weave.inline(r'printf(\"%d\\n\",a);',['a'])\n    32956972\n    
\n

\nIn this case, the result is non-sensical, but also non-fatal. In other \nsituations, it might produce a compile time error because a is \nrequired to be an integer at some point in the code, or it could produce a \nsegmentation fault. Its possible to protect against passing \ninline arguments of the wrong data type by using asserts in \nPython.\n\n

    \n    >>> a = 'string'\n    >>> def protected_printf(a):    \n    ...     assert(type(a) == type(1))\n    ...     weave.inline(r'printf(\"%d\\n\",a);',['a'])\n    >>> protected_printf(1)\n     1\n    >>> protected_printf('string')\n    AssertError...\n    
\n\n

\nFor printing strings, the format statement needs to be changed. Also, weave\ndoesn't convert strings to char*. Instead it uses CXX Py::String type, so \nyou have to do a little more work. Here we convert it to a C++ std::string\nand then ask cor the char* version.\n\n

    \n    >>> a = 'string'    \n    >>> weave.inline(r'printf(\"%s\\n\",std::string(a).c_str());',['a'])\n    string\n    
\n

\n \nThis is a little convoluted. Perhaps strings should convert to std::string\nobjects instead of CXX objects. Or maybe to char*.\n\n\n

\nAs in this case, C/C++ code fragments often have to change to accept different \ntypes. For the given printing task, however, C++ streams provide a way of a \nsingle statement that works for integers and strings. By default, the stream \nobjects live in the std (standard) namespace and thus require the use of \nstd::.\n\n

    \n    >>> weave.inline('std::cout << a << std::endl;',['a'])\n    1    \n    >>> a = 'string'\n    >>> weave.inline('std::cout << a << std::endl;',['a'])\n    string\n    
\n \n

\nExamples using printf and cout are included in \nexamples/print_example.py.\n\n\n

More examples

\n\nThis section shows several more advanced uses of inline. It \nincludes a few algorithms from the Python Cookbook \nthat have been re-written in inline C to improve speed as well as a couple \nexamples using Numeric and wxPython.\n\n\n

Binary search

\nLets look at the example of searching a sorted list of integers for a value. \nFor inspiration, we'll use Kalle Svensson's \nbinary_search() algorithm from the Python Cookbook. His recipe follows:\n\n
\n    def binary_search(seq, t):\n        min = 0; max = len(seq) - 1\n        while 1:\n            if max < min:\n                return -1\n            m = (min  + max)  / 2\n            if seq[m] < t: \n                min = m  + 1 \n            elif seq[m] > t: \n                max = m  - 1 \n            else:\n                return m    \n    
\n\nThis Python version works for arbitrary Python data types. The C version below is \nspecialized to handle integer values. There is a little type checking done in \nPython to assure that we're working with the correct data types before heading \ninto C. The variables seq and t don't need to be \ndeclared beacuse weave handles converting and declaring them in \nthe C code. All other temporary variables such as min, max, etc. \nmust be declared -- it is C after all. Here's the new mixed Python/C function:\n\n
    \n    def c_int_binary_search(seq,t):\n        # do a little type checking in Python\n        assert(type(t) == type(1))\n        assert(type(seq) == type([]))\n        \n        # now the C code\n        code = \"\"\"\n               #line 29 \"binary_search.py\"\n               int val, m, min = 0;  \n               int max = seq.length() - 1;\n               PyObject *py_val; \n               for(;;)\n               {\n                   if (max < min  ) \n                   { \n                       return_val =  Py::new_reference_to(Py::Int(-1)); \n                       break;\n                   } \n                   m =  (min + max) /2;\n                   val =    py_to_int(PyList_GetItem(seq.ptr(),m),\"val\"); \n                   if (val  < t) \n                       min = m  + 1;\n                   else if (val >  t)\n                       max = m - 1;\n                   else\n                   {\n                       return_val = Py::new_reference_to(Py::Int(m));\n                       break;\n                   }\n               }\n               \"\"\"\n        return inline(code,['seq','t'])\n    
\n

\nWe have two variables seq and t passed in. \nt is guaranteed (by the assert) to be an integer. \nPython integers are converted to C int types in the transition from Python to \nC. seq is a Python list. By default, it is translated to a CXX \nlist object. Full documentation for the CXX library can be found at its website. The basics are that the CXX \nprovides C++ class equivalents for Python objects that simplify, or at \nleast object orientify, working with Python objects in C/C++. For example, \nseq.length() returns the length of the list. A little more about\nCXX and its class methods, etc. is in the ** type conversions ** section.\n

\n\nNote: CXX uses templates and therefore may be a little less portable than \nanother alternative by Gordan McMillan called SCXX which was inspired by\nCXX. It doesn't use templates so it should compile faster and be more portable.\nSCXX has a few less features, but it appears to me that it would mesh with\nthe needs of weave quite well. Hopefully xxx_spec files will be written\nfor SCXX in the future, and we'll be able to compare on a more empirical\nbasis. Both sets of spec files will probably stick around, it just a question\nof which becomes the default.\n\n

\nMost of the algorithm above looks similar in C to the original Python code. \nThere are two main differences. The first is the setting of \nreturn_val instead of directly returning from the C code with a \nreturn statement. return_val is an automatically \ndefined variable of type PyObject* that is returned from the C \ncode back to Python. You'll have to handle reference counting issues when \nsetting this variable. In this example, CXX classes and functions handle the \ndirty work. All CXX functions and classes live in the namespace \nPy::. The following code converts the integer m to a \nCXX Int() object and then to a PyObject* with an \nincremented reference count using Py::new_reference_to().\n\n

   \n    return_val = Py::new_reference_to(Py::Int(m));\n    
\n

\nThe second big differences shows up in the retrieval of integer values from the \nPython list. The simple Python seq[i] call balloons into a C \nPython API call to grab the value out of the list and then a separate call to \npy_to_int() that converts the PyObject* to an integer. \npy_to_int() includes both a NULL cheack and a \nPyInt_Check() call as well as the conversion call. If either of \nthe checks fail, an exception is raised. The entire C++ code block is executed \nwith in a try/catch block that handles exceptions much like Python \ndoes. This removes the need for most error checking code.\n

\nIt is worth note that CXX lists do have indexing operators that result \nin code that looks much like Python. However, the overhead in using them \nappears to be relatively high, so the standard Python API was used on the \nseq.ptr() which is the underlying PyObject* of the \nList object.\n

\nThe #line directive that is the first line of the C code \nblock isn't necessary, but it's nice for debugging. If the compilation fails \nbecause of the syntax error in the code, the error will be reported as an error \nin the Python file \"binary_search.py\" with an offset from the given line number \n(29 here).\n

\nSo what was all our effort worth in terms of efficiency? Well not a lot in \nthis case. The examples/binary_search.py file runs both Python and C versions \nof the functions As well as using the standard bisect module. If \nwe run it on a 1 million element list and run the search 3000 times (for 0-\n2999), here are the results we get:\n\n

   \n    C:\\home\\ej\\wrk\\scipy\\weave\\examples> python binary_search.py\n    Binary search for 3000 items in 1000000 length list of integers:\n     speed in python: 0.159999966621\n     speed of bisect: 0.121000051498\n     speed up: 1.32\n     speed in c: 0.110000014305\n     speed up: 1.45\n     speed in c(no asserts): 0.0900000333786\n     speed up: 1.78\n    
\n

\nSo, we get roughly a 50-75% improvement depending on whether we use the Python \nasserts in our C version. If we move down to searching a 10000 element list, \nthe advantage evaporates. Even smaller lists might result in the Python \nversion being faster. I'd like to say that moving to Numeric lists (and \ngetting rid of the GetItem() call) offers a substantial speed up, but my \npreliminary efforts didn't produce one. I think the log(N) algorithm is to \nblame. Because the algorithm is nice, there just isn't much time spent \ncomputing things, so moving to C isn't that big of a win. If there are ways to \nreduce conversion overhead of values, this may improve the C/Python speed \nup. Anyone have other explanations or faster code, please let me know.\n\n\n

Dictionary Sort

\n

\nThe demo in examples/dict_sort.py is another example from the Python CookBook. \nThis \nsubmission, by Alex Martelli, demonstrates how to return the values from a \ndictionary sorted by their keys:\n\n

       \n    def sortedDictValues3(adict):\n        keys = adict.keys()\n        keys.sort()\n        return map(adict.get, keys)\n    
\n

\nAlex provides 3 algorithms and this is the 3rd and fastest of the set. The C \nversion of this same algorithm follows:\n\n

       \n    def c_sort(adict):\n        assert(type(adict) == type({}))\n        code = \"\"\"     \n        #line 21 \"dict_sort.py\"  \n        Py::List keys = adict.keys();\n        Py::List items(keys.length()); keys.sort();     \n        PyObject* item = NULL; \n        for(int i = 0;  i < keys.length();i++)\n        {\n            item = PyList_GET_ITEM(keys.ptr(),i);\n            item = PyDict_GetItem(adict.ptr(),item);\n            Py_XINCREF(item);\n            PyList_SetItem(items.ptr(),i,item);              \n        }           \n        return_val = Py::new_reference_to(items);\n        \"\"\"   \n        return inline_tools.inline(code,['adict'],verbose=1)\n    
\n

\nLike the original Python function, the C++ version can handle any Python \ndictionary regardless of the key/value pair types. It uses CXX objects for the \nmost part to declare python types in C++, but uses Python API calls to manipulate \ntheir contents. Again, this choice is made for speed. The C++ version, while\nmore complicated, is about a factor of 2 faster than Python.\n\n

       \n    C:\\home\\ej\\wrk\\scipy\\weave\\examples> python dict_sort.py\n    Dict sort of 1000 items for 300 iterations:\n     speed in python: 0.319999933243\n    [0, 1, 2, 3, 4]\n     speed in c: 0.151000022888\n     speed up: 2.12\n    [0, 1, 2, 3, 4]\n    
\n

\n\n

Numeric -- cast/copy/transpose

\n\nCastCopyTranspose is a function called quite heavily by Linear Algebra routines\nin the Numeric library. It's needed in part because of the row-major memory layout\nof multi-dimensional Python (and C) arrays vs. the col-major order of the underlying\nFortran algorithms. For small matrices (say 100x100 or less), a significant\nportion of the common routines such as LU decomposition or singular value decomposition\nare spent in this setup routine. This shouldn't happen. Here is the Python\nversion of the function using standard Numeric operations.\n\n
       \n    def _castCopyAndTranspose(type, array):\n        if a.typecode() == type:\n            cast_array = copy.copy(Numeric.transpose(a))\n        else:\n            cast_array = copy.copy(Numeric.transpose(a).astype(type))\n        return cast_array\n    
\n\nAnd the following is a inline C version of the same function:\n\n
\n    from weave.blitz_tools import blitz_type_factories\n    from weave import scalar_spec\n    from weave import inline\n    def _cast_copy_transpose(type,a_2d):\n        assert(len(shape(a_2d)) == 2)\n        new_array = zeros(shape(a_2d),type)\n        numeric_type = scalar_spec.numeric_to_blitz_type_mapping[type]\n        code = \\\n        \"\"\"  \n        for(int i = 0;i < _Na_2d[0]; i++)  \n            for(int j = 0;  j < _Na_2d[1]; j++)\n                new_array(i,j) = (%s) a_2d(j,i);\n        \"\"\" % numeric_type\n        inline(code,['new_array','a_2d'],\n               type_factories = blitz_type_factories,compiler='gcc')\n        return new_array\n    
\n\nThis example uses blitz++ arrays instead of the standard representation of \nNumeric arrays so that indexing is simpler to write. This is accomplished by \npassing in the blitz++ \"type factories\" to override the standard Python to C++ \ntype conversions. Blitz++ arrays allow you to write clean, fast code, but they \nalso are sloooow to compile (20 seconds or more for this snippet). This is why \nthey aren't the default type used for Numeric arrays (and also because most \ncompilers can't compile blitz arrays...). inline() is also forced \nto use 'gcc' as the compiler because the default compiler on Windows (MSVC) \nwill not compile blitz code. 'gcc' I think will use the standard compiler \non Unix machines instead of explicitly forcing gcc (check this) \n\nComparisons of the Python vs inline C++ code show a factor of 3 speed up. Also \nshown are the results of an \"inplace\" transpose routine that can be used if the \noutput of the linear algebra routine can overwrite the original matrix (this is \noften appropriate). This provides another factor of 2 improvement.\n\n
\n     #C:\\home\\ej\\wrk\\scipy\\weave\\examples> python cast_copy_transpose.py\n    # Cast/Copy/Transposing (150,150)array 1 times\n    #  speed in python: 0.870999932289\n    #  speed in c: 0.25\n    #  speed up: 3.48\n    #  inplace transpose c: 0.129999995232\n    #  speed up: 6.70\n    
\n\n\n

wxPython

\n\ninline knows how to handle wxPython objects. That's nice in and of\nitself, but it also demonstrates that the type conversion mechanism is reasonably \nflexible. Chances are, it won't take a ton of effort to support special types\nyou might have. The examples/wx_example.py borrows the scrolled window\nexample from the wxPython demo, except that it mixes inline C code in the middle\nof the drawing function.\n\n
\n    def DoDrawing(self, dc):\n        \n        red = wxNamedColour(\"RED\");\n        blue = wxNamedColour(\"BLUE\");\n        grey_brush = wxLIGHT_GREY_BRUSH;\n        code = \\\n        \"\"\"\n        #line 108 \"wx_example.py\" \n        dc->BeginDrawing();\n        dc->SetPen(wxPen(*red,4,wxSOLID));\n        dc->DrawRectangle(5,5,50,50);\n        dc->SetBrush(*grey_brush);\n        dc->SetPen(wxPen(*blue,4,wxSOLID));\n        dc->DrawRectangle(15, 15, 50, 50);\n        \"\"\"\n        inline(code,['dc','red','blue','grey_brush'])\n        \n        dc.SetFont(wxFont(14, wxSWISS, wxNORMAL, wxNORMAL))\n        dc.SetTextForeground(wxColour(0xFF, 0x20, 0xFF))\n        te = dc.GetTextExtent(\"Hello World\")\n        dc.DrawText(\"Hello World\", 60, 65)\n\n        dc.SetPen(wxPen(wxNamedColour('VIOLET'), 4))\n        dc.DrawLine(5, 65+te[1], 60+te[0], 65+te[1])\n        ...\n    
\n\nHere, some of the Python calls to wx objects were just converted to C++ calls. There\nisn't any benefit, it just demonstrates the capabilities. You might want to use this\nif you have a computationally intensive loop in your drawing code that you want to \nspeed up.\n\nOn windows, you'll have to use the MSVC compiler if you use the standard wxPython\nDLLs distributed by Robin Dunn. That's because MSVC and gcc, while binary\ncompatible in C, are not binary compatible for C++. In fact, it's probably best, no \nmatter what platform you're on, to specify that inline use the same\ncompiler that was used to build wxPython to be on the safe side. There isn't currently\na way to learn this info from the library -- you just have to know. Also, at least\non the windows platform, you'll need to install the wxWindows libraries and link to \nthem. I think there is a way around this, but I haven't found it yet -- I get some\nlinking errors dealing with wxString. One final note. You'll probably have to\ntweak weave/wx_spec.py or weave/wx_info.py for your machine's configuration to\npoint at the correct directories etc. There. That should sufficiently scare people\ninto not even looking at this... :)\n\n
\n

Keyword Options

\n

\nThe basic definition of the inline() function has a slew of \noptional variables. It also takes keyword arguments that are passed to \ndistutils as compiler options. The following is a formatted \ncut/paste of the argument section of inline's doc-string. It \nexplains all of the variables. Some examples using various options will \nfollow.\n\n

       \n    def inline(code,arg_names,local_dict = None, global_dict = None, \n               force = 0, \n               compiler='',\n               verbose = 0, \n               support_code = None,\n               customize=None, \n               type_factories = None, \n               auto_downcast=1,\n               **kw):\n    
\n\n \ninline has quite \na few options as listed below. Also, the keyword arguments for distutils \nextension modules are accepted to specify extra information needed for \ncompiling. \n
\n

inline Arguments:

\n
\n
\n
code
\n \n
\nstring. A string of valid C++ code. It should not \n specify a return statement. Instead it should assign results that need to be \n returned to Python in the return_val. \n
\n\n
arg_names
\n \n
\nlist of strings. A list of Python variable names \n that should be transferred from Python into the C/C++ code. \n
\n\n
local_dict
\n \n
\noptional. dictionary. If specified, it is a \n dictionary of values that should be used as the local scope for the C/C++ \n code. If local_dict is not specified the local dictionary of the calling \n function is used. \n
\n\n
global_dict
\n \n
\noptional. dictionary. If specified, it is a \n dictionary of values that should be used as the global scope for the C/C++ \n code. If global_dict is not specified the global dictionary of the calling \n function is used. \n
\n\n
force
\n \n
\noptional. 0 or 1. default 0. If 1, the C++ code is \n compiled every time inline is called. This is really only useful for \n debugging, and probably only useful if you're editing support_code a lot. \n
\n\n
compiler
\n \n
\noptional. string. The name of compiler to use when compiling. On windows, it \nunderstands 'msvc' and 'gcc' as well as all the compiler names understood by \ndistutils. On Unix, it'll only understand the values understood by distutils. \n(I should add 'gcc' though to this).\n

\nOn windows, the compiler defaults to the Microsoft C++ compiler. If this isn't \navailable, it looks for mingw32 (the gcc compiler).\n

\nOn Unix, it'll probably use the same compiler that was used when compiling \nPython. Cygwin's behavior should be similar.

\n
\n\n
verbose
\n \n
\noptional. 0,1, or 2. default 0. Specifies how \n much information is printed during the compile phase of inlining code. 0 is \n silent (except on windows with msvc where it still prints some garbage). 1 \n informs you when compiling starts, finishes, and how long it took. 2 prints \n out the command lines for the compilation process and can be useful if you're \n having problems getting code to work. It's handy for finding the name of the \n .cpp file if you need to examine it. verbose has no effect if the \n compilation isn't necessary. \n
\n\n
support_code
\n \n
\noptional. string. A string of valid C++ code \n declaring extra code that might be needed by your compiled function. This \n could be declarations of functions, classes, or structures. \n
\n\n
customize
\n \n
\noptional. base_info.custom_info object. An \n alternative way to specify support_code, headers, etc. needed by the \n function; see the weave.base_info module for more details. (not sure \n this'll be used much). \n \n
\n
type_factories
\n \n
\noptional. list of type specification factories. These guys are what convert \nPython data types to C/C++ data types. If you'd like to use a different set of \ntype conversions than the default, specify them here. Look in the type \nconversions section of the main documentation for examples.\n
\n
auto_downcast
\n \n
\noptional. 0 or 1. default 1. This only affects functions that have Numeric \narrays as input variables. Setting this to 1 will cause all floating point \nvalues to be cast as float instead of double if all the Numeric arrays are of \ntype float. If even one of the arrays has type double or double complex, all \nvariables maintain their standard types.\n
\n
\n
\n\n

Distutils keywords:

\n
\ninline() also accepts a number of distutils keywords \nfor controlling how the code is compiled. The following descriptions have been \ncopied from Greg Ward's distutils.extension.Extension class doc-\nstrings for convenience:\n\n
\n
sources
\n \n
\n[string] list of source filenames, relative to the \n distribution root (where the setup script lives), in Unix form \n (slash-separated) for portability. Source files may be C, C++, SWIG (.i), \n platform-specific resource files, or whatever else is recognized by the \n \"build_ext\" command as source for a Python extension. Note: The module_path \n file is always appended to the front of this list \n
\n\n
include_dirs
\n \n
\n[string] list of directories to search for C/C++ \n header files (in Unix form for portability) \n
\n\n
define_macros
\n \n
\n[(name : string, value : string|None)] list of \n macros to define; each macro is defined using a 2-tuple, where 'value' is \n either the string to define it to or None to define it without a particular \n value (equivalent of \"#define FOO\" in source or -DFOO on Unix C compiler \n command line) \n
\n
undef_macros
\n \n
\n[string] list of macros to undefine explicitly \n
\n
library_dirs
\n
\n[string] list of directories to search for C/C++ libraries at link time \n
\n
libraries
\n
\n[string] list of library names (not filenames or paths) to link against \n
\n
runtime_library_dirs
\n
\n[string] list of directories to search for C/C++ libraries at run time (for \nshared extensions, this is when the extension is loaded) \n
\n\n
extra_objects
\n \n
\n[string] list of extra files to link with (eg. \n object files not implied by 'sources', static library that must be \n explicitly specified, binary resource files, etc.) \n
\n\n
extra_compile_args
\n \n
\n[string] any extra platform- and compiler-specific \n information to use when compiling the source files in 'sources'. For \n platforms and compilers where \"command line\" makes sense, this is typically \n a list of command-line arguments, but for other platforms it could be \n anything. \n
\n
extra_link_args
\n \n
\n[string] any extra platform- and compiler-specific \n information to use when linking object files together to create the \n extension (or to create a new static Python interpreter). Similar \n interpretation as for 'extra_compile_args'. \n
\n
export_symbols
\n \n
\n[string] list of symbols to be exported from a shared extension. Not used on \nall platforms, and not generally necessary for Python extensions, which \ntypically export exactly one symbol: \"init\" + extension_name. \n
\n
\n
\n\n\n

Keyword Option Examples

\nWe'll walk through several examples here to demonstrate the behavior of \ninline and also how the various arguments are used.\n\nIn the simplest (most) cases, code and arg_names\nare the only arguments that need to be specified. Here's a simple example\nrun on Windows machine that has Microsoft VC++ installed.\n\n
\n    >>> from weave import inline\n    >>> a = 'string'\n    >>> code = \"\"\"\n    ...        int l = a.length();\n    ...        return_val = Py::new_reference_to(Py::Int(l));\n    ...        \"\"\"\n    >>> inline(code,['a'])\n     sc_86e98826b65b047ffd2cd5f479c627f12.cpp\n    Creating\n       library C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86e98826b65b047ffd2cd5f479c627f12.lib\n    and object C:\\DOCUME~ 1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86e98826b65b047ff\n    d2cd5f479c627f12.exp\n    6\n    >>> inline(code,['a'])\n    6\n    
\n \nWhen inline is first run, you'll notice a pause and some \ntrash printed to the screen. The \"trash\" is actually part of the compiler's\noutput that distutils does not suppress. The name of the extension file, \nsc_bighonkingnumber.cpp, is generated from the md5 check sum\nof the C/C++ code fragment. On Unix or windows machines with only\ngcc installed, the trash will not appear. On the second call, the code \nfragment is not compiled since it already exists, and only the answer is \nreturned. Now kill the interpreter and restart, and run the same code with\na different string.\n\n
\n    >>> from weave import inline\n    >>> a = 'a longer string' \n    >>> code = \"\"\" \n    ...        int l = a.length();\n    ...        return_val = Py::new_reference_to(Py::Int(l));  \n    ...        \"\"\"\n    >>> inline(code,['a'])\n    15\n    
\n

\nNotice this time, inline() did not recompile the code because it\nfound the compiled function in the persistent catalog of functions. There is\na short pause as it looks up and loads the function, but it is much shorter \nthan compiling would require.\n

\nYou can specify the local and global dictionaries if you'd like (much like \nexec or eval() in Python), but if they aren't \nspecified, the \"expected\" ones are used -- i.e. the ones from the function that \ncalled inline() . This is accomplished through a little call \nframe trickery. Here is an example where the local_dict is specified using\nthe same code example from above:\n\n

\n    >>> a = 'a longer string'\n    >>> b = 'an even  longer string' \n    >>> my_dict = {'a':b}\n    >>> inline(code,['a'])\n    15\n    >>> inline(code,['a'],my_dict)\n    21\n    
\n \n

\nEvery time the code is changed, inline does a \nrecompile. However, changing any of the other options in inline does not\nforce a recompile. The force option was added so that one\ncould force a recompile when tinkering with other variables. In practice,\nit is just as easy to change the code by a single character\n(like adding a space some place) to force the recompile. Note: It also \nmight be nice to add some methods for purging the cache and on disk \ncatalogs.\n

\nI use verbose sometimes for debugging. When set to 2, it'll \noutput all the information (including the name of the .cpp file) that you'd\nexpect from running a make file. This is nice if you need to examine the\ngenerated code to see where things are going haywire. Note that error\nmessages from failed compiles are printed to the screen even if verbose\n is set to 0.\n

\nThe following example demonstrates using gcc instead of the standard msvc \ncompiler on windows using same code fragment as above. Because the example has \nalready been compiled, the force=1 flag is needed to make \ninline() ignore the previously compiled version and recompile \nusing gcc. The verbose flag is added to show what is printed out:\n\n

\n    >>>inline(code,['a'],compiler='gcc',verbose=2,force=1)\n    running build_ext    \n    building 'sc_86e98826b65b047ffd2cd5f479c627f13' extension \n    c:\\gcc-2.95.2\\bin\\g++.exe -mno-cygwin -mdll -O2 -w -Wstrict-prototypes -IC:\n    \\home\\ej\\wrk\\scipy\\weave -IC:\\Python21\\Include -c C:\\DOCUME~1\\eric\\LOCAL\n    S~1\\Temp\\python21_compiled\\sc_86e98826b65b047ffd2cd5f479c627f13.cpp -o C:\\D\n    OCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86e98826b65b04\n    7ffd2cd5f479c627f13.o    \n    skipping C:\\home\\ej\\wrk\\scipy\\weave\\CXX\\cxxextensions.c (C:\\DOCUME~1\\eri\n    c\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\cxxextensions.o up-to-date)\n    skipping C:\\home\\ej\\wrk\\scipy\\weave\\CXX\\cxxsupport.cxx (C:\\DOCUME~1\\eric\n    \\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\cxxsupport.o up-to-date)\n    skipping C:\\home\\ej\\wrk\\scipy\\weave\\CXX\\IndirectPythonInterface.cxx (C:\\\n    DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\indirectpythonin\n    terface.o up-to-date)\n    skipping C:\\home\\ej\\wrk\\scipy\\weave\\CXX\\cxx_extensions.cxx (C:\\DOCUME~1\\\n    eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\cxx_extensions.o up-to-da\n    te)\n    writing C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86\n    e98826b65b047ffd2cd5f479c627f13.def\n    c:\\gcc-2.95.2\\bin\\dllwrap.exe --driver-name g++ -mno-cygwin -mdll -static -\n    -output-lib C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\l\n    ibsc_86e98826b65b047ffd2cd5f479c627f13.a --def C:\\DOCUME~1\\eric\\LOCALS~1\\Te\n    mp\\python21_compiled\\temp\\Release\\sc_86e98826b65b047ffd2cd5f479c627f13.def \n    -s C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\temp\\Release\\sc_86e9882\n    6b65b047ffd2cd5f479c627f13.o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compil\n    ed\\temp\\Release\\cxxextensions.o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_com\n    
piled\\temp\\Release\\cxxsupport.o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_com\n    piled\\temp\\Release\\indirectpythoninterface.o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\n    \\python21_compiled\\temp\\Release\\cxx_extensions.o -LC:\\Python21\\libs -lpytho\n    n21 -o C:\\DOCUME~1\\eric\\LOCALS~1\\Temp\\python21_compiled\\sc_86e98826b65b047f\n    fd2cd5f479c627f13.pyd\n    15\n    
\n\nThat's quite a bit of output. verbose=1 just prints the compile\ntime.\n\n
\n    >>>inline(code,['a'],compiler='gcc',verbose=1,force=1)\n    Compiling code...\n    finished compiling (sec):  6.00800001621\n    15\n    
\n\n

\n Note: I've only used the compiler option for switching between 'msvc'\nand 'gcc' on windows. It may have use on Unix also, but I don't know yet.\n\n\n

\nThe support_code argument is likely to be used a lot. It allows \nyou to specify extra code fragments such as function, structure or class \ndefinitions that you want to use in the code string. Note that \nchanges to support_code do not force a recompile. The \ncatalog only relies on code (for performance reasons) to determine \nwhether recompiling is necessary. So, if you make a change to support_code, \nyou'll need to alter code in some way or use the \nforce argument to get the code to recompile. I usually just add \nsome innocuous whitespace to the end of one of the lines in code \nsomewhere. Here's an example of defining a separate method for calculating\nthe string length:\n\n

\n    >>> from weave import inline\n    >>> a = 'a longer string'\n    >>> support_code = \"\"\"\n    ...                PyObject* length(Py::String a)\n    ...                {\n    ...                    int l = a.length();  \n    ...                    return Py::new_reference_to(Py::Int(l)); \n    ...                }\n    ...                \"\"\"        \n    >>> inline(\"return_val = length(a);\",['a'],\n    ...        support_code = support_code)\n    15\n    
\n

\ncustomize is a left over from a previous way of specifying \ncompiler options. It is a custom_info object that can specify \nquite a bit of information about how a file is compiled. These \ninfo objects are the standard way of defining compile information \nfor type conversion classes. However, I don't think they are as handy here, \nespecially since we've exposed all the keyword arguments that distutils can \nhandle. Between these keywords, and the support_code option, I \nthink customize may be obsolete. We'll see if anyone cares to use \nit. If not, it'll get axed in the next version.\n

\nThe type_factories variable is important to people who want to\ncustomize the way arguments are converted from Python to C. We'll talk about\nthis in the next chapter **xx** of this document when we discuss type\nconversions.\n

\nauto_downcast handles one of the big type conversion issues that\nis common when using Numeric arrays in conjunction with Python scalar values.\nIf you have an array of single precision values and multiply that array by a \nPython scalar, the result is upcast to a double precision array because the\nscalar value is double precision. This is not usually the desired behavior\nbecause it can double your memory usage. auto_downcast goes\nsome distance towards changing the casting precedence of arrays and scalars.\nIf you're only using single precision arrays, it will automatically downcast all\nscalar values from double to single precision when they are passed into the\nC++ code. This is the default behavior. If you want all values to keep their\ndefault type, set auto_downcast to 0.\n

\n\n\n\n

Returning Values

\n\nPython variables in the local and global scope transfer seamlessly from Python \ninto the C++ snippets. And, if inline were to completely live up\nto its name, any modifications to variables in the C++ code would be reflected\nin the Python variables when control was passed back to Python. For example,\nthe desired behavior would be something like:\n\n
\n    # THIS DOES NOT WORK\n    >>> a = 1\n    >>> weave.inline(\"a++;\",['a'])\n    >>> a\n    2\n    
\n\nInstead you get:\n\n
\n    >>> a = 1\n    >>> weave.inline(\"a++;\",['a'])\n    >>> a\n    1\n    
\n \nVariables are passed into C++ as if you are calling a Python function. Python's \ncalling convention is sometimes called \"pass by assignment\". This means it's as \nif a c_a = a assignment is made right before the inline \ncall is made and the c_a variable is used within the C++ code. \nThus, any changes made to c_a are not reflected in Python's \na variable. Things do get a little more confusing, however, when \nlooking at variables with mutable types. Changes made in C++ to the contents \nof mutable types are reflected in the Python variables.\n\n
\n    >>> a= [1,2]\n    >>> weave.inline(\"PyList_SetItem(a.ptr(),0,PyInt_FromLong(3));\",['a'])\n    >>> print a\n    [3, 2]\n    
\n\nSo modifications to the contents of mutable types in C++ are seen when control\nis returned to Python. Modifications to immutable types such as tuples,\nstrings, and numbers do not alter the Python variables.\n\nIf you need to make changes to an immutable variable, you'll need to assign\nthe new value to the \"magic\" variable return_val in C++. This\nvalue is returned by the inline() function:\n\n
\n    >>> a = 1\n    >>> a = weave.inline(\"return_val = Py::new_reference_to(Py::Int(a+1));\",['a'])  \n    >>> a\n    2\n    
\n\nThe return_val variable can also be used to return newly created \nvalues. This is possible by returning a tuple. The following trivial example \nillustrates how this can be done:\n\n
       \n    # python version\n    def multi_return():\n        return 1, '2nd'\n    \n    # C version.\n    def c_multi_return():    \n        code =  \"\"\"\n     \t        Py::Tuple results(2);\n     \t        results[0] = Py::Int(1);\n     \t        results[1] = Py::String(\"2nd\");\n     \t        return_val = Py::new_reference_to(results); \t        \n                \"\"\"\n        return inline_tools.inline(code)\n    
\n

\nThe example is available in examples/tuple_return.py. It also\nhas the dubious honor of demonstrating how much inline() can \nslow things down. The C version here is about 10 times slower than the Python\nversion. Of course, something so trivial has no reason to be written in\nC anyway.\n\n\n

The issue with locals()

\n

\ninline passes the locals() and globals() \ndictionaries from Python into the C++ function from the calling function. It \nextracts the variables that are used in the C++ code from these dictionaries, \nconverts them to C++ variables, and then calculates using them. It seems like \nit would be trivial, then, after the calculations were finished to then insert \nthe new values back into the locals() and globals() \ndictionaries so that the modified values were reflected in Python. \nUnfortunately, as pointed out by the Python manual, the locals() dictionary is \nnot writable. \n

\n\nI suspect locals() is not writable because there are some \noptimizations done to speed lookups of the local namespace. I'm guessing local \nlookups don't always look at a dictionary to find values. Can someone \"in the \nknow\" confirm or correct this? Another thing I'd like to know is whether there \nis a way to write to the local namespace of another stack frame from C/C++. If \nso, it would be possible to have some clean up code in compiled functions that \nwrote final values of variables in C++ back to the correct Python stack frame. \nI think this goes a long way toward making inline truly live up \nto its name. I don't think we'll get to the point of creating variables in \nPython for variables created in C -- although I suppose with a C/C++ parser you \ncould do that also.\n\n

\n\n\n

A quick look at the code

\n\nweave generates a C++ file holding an extension function for \neach inline code snippet. These file names are generated using \nfrom the md5 signature of the code snippet and saved to a location specified by \nthe PYTHONCOMPILED environment variable (discussed later). The cpp files are \ngenerally about 200-400 lines long and include quite a few functions to support \ntype conversions, etc. However, the actual compiled function is pretty simple. \nBelow is the familiar printf example:\n\n
\n    >>> import weave    \n    >>> a = 1\n    >>> weave.inline('printf(\"%d\\\\n\",a);',['a'])\n    1\n    
\n\nAnd here is the extension function generated by inline:\n\n
\n    static PyObject* compiled_func(PyObject*self, PyObject* args)\n    {\n        // The Py_None needs an incref before returning\n        PyObject *return_val = NULL;\n        int exception_occured = 0;\n        PyObject *py__locals = NULL;\n        PyObject *py__globals = NULL;\n        PyObject *py_a;\n        py_a = NULL;\n        \n        if(!PyArg_ParseTuple(args,\"OO:compiled_func\",&py__locals,&py__globals))\n            return NULL;\n        try                              \n        {                                \n            PyObject* raw_locals = py_to_raw_dict(py__locals,\"_locals\");\n            PyObject* raw_globals = py_to_raw_dict(py__globals,\"_globals\");\n            int a = py_to_int (get_variable(\"a\",raw_locals,raw_globals),\"a\");\n            /* Here is the inline code */            \n            printf(\"%d\\n\",a);\n            /* I would like to fill in changed locals and globals here... */\n        }                                       \n        catch( Py::Exception& e)           \n        {                                \n            return_val =  Py::Null();    \n            exception_occured = 1;       \n        }                                 \n        if(!return_val && !exception_occured)\n        {\n                                      \n            Py_INCREF(Py_None);              \n            return_val = Py_None;            \n        }\n        /* clean up code */\n        \n        /* return */                              \n        return return_val;           \n    }                                \n    
\n\nEvery inline function takes exactly two arguments -- the local and global\ndictionaries for the current scope. All variable values are looked up out\nof these dictionaries. The lookups, along with all inline code \nexecution, are done within a C++ try block. If the variables\naren't found, or there is an error converting a Python variable to the \nappropriate type in C++, an exception is raised. The C++ exception\nis automatically converted to a Python exception by CXX and returned to Python.\n\nThe py_to_int() function illustrates how the conversions and\nexception handling works. py_to_int first checks that the given PyObject*\npointer is not NULL and is a Python integer. If all is well, it calls the\nPython API to convert the value to an int. Otherwise, it calls\nhandle_bad_type() which gathers information about what went wrong\nand then raises a CXX TypeError which returns to Python as a TypeError.\n\n
\n    int py_to_int(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyInt_Check(py_obj))\n            handle_bad_type(py_obj,\"int\", name);\n        return (int) PyInt_AsLong(py_obj);\n    }\n    
\n\n
\n    void handle_bad_type(PyObject* py_obj, char* good_type, char*  var_name)\n    {\n        char msg[500];\n        sprintf(msg,\"received '%s' type instead of '%s' for variable '%s'\",\n                find_type(py_obj),good_type,var_name);\n        throw Py::TypeError(msg);\n    }\n    \n    char* find_type(PyObject* py_obj)\n    {\n        if(py_obj == NULL) return \"C NULL value\";\n        if(PyCallable_Check(py_obj)) return \"callable\";\n        if(PyString_Check(py_obj)) return \"string\";\n        if(PyInt_Check(py_obj)) return \"int\";\n        if(PyFloat_Check(py_obj)) return \"float\";\n        if(PyDict_Check(py_obj)) return \"dict\";\n        if(PyList_Check(py_obj)) return \"list\";\n        if(PyTuple_Check(py_obj)) return \"tuple\";\n        if(PyFile_Check(py_obj)) return \"file\";\n        if(PyModule_Check(py_obj)) return \"module\";\n        \n        //should probably do more interagation (and thinking) on these.\n        if(PyCallable_Check(py_obj) && PyInstance_Check(py_obj)) return \"callable\";\n        if(PyInstance_Check(py_obj)) return \"instance\"; \n        if(PyCallable_Check(py_obj)) return \"callable\";\n        return \"unkown type\";\n    }\n    
\n\nSince the inline is also executed within the try/catch\nblock, you can use CXX exceptions within your code. It is usually a bad idea\nto directly return from your code, even if an error occurs. This\nskips the clean up section of the extension function. In this simple example,\nthere isn't any clean up code, but in more complicated examples, there may\nbe some reference counting that needs to be taken care of here on converted\nvariables. To avoid this, either use exceptions or set \nreturn_val to NULL and use if/then's to skip code\nafter errors.\n\n\n

Technical Details

\n

\nThere are several main steps to using C/C++ code within Python:\n

    \n
  1. Type conversion \n
  2. Generating C/C++ code \n
  3. Compile the code to an extension module \n
  4. Catalog (and cache) the function for future use
  5. \n
\n

\nItems 1 and 2 above are related, but most easily discussed separately. Type \nconversions are customizable by the user if needed. Understanding them is \npretty important for anything beyond trivial uses of inline. \nGenerating the C/C++ code is handled by ext_function and \next_module classes. For the most part, compiling the code is \nhandled by distutils. Some customizations were needed, but they were \nrelatively minor and do not require changes to distutils itself. Cataloging is \npretty simple in concept, but surprisingly required the most code to implement \n(and still likely needs some work). So, this section covers items 1 and 4 from \nthe list. Item 2 is covered later in the chapter covering the \next_tools module, and distutils is covered by a completely \nseparate document xxx.\n\n

Passing Variables in/out of the C/C++ code

\n\nNote: Passing variables into the C code is pretty straightforward, but there \nare subtleties to how variable modifications in C are returned to Python. See Returning Values for a more thorough discussion of \nthis issue.\n \n \n\n

Type Conversions

\n\n\nNote: Maybe xxx_converter instead of \nxxx_specification is a more descriptive name. Might change in \nfuture version?\n\n\n

\nBy default, inline() makes the following type conversions between\nPython and C++ types.\n

\n\n

\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n

Default Data Type Conversions

\n

Python

\n

C++

   int   int
   float   double
   complex   std::complex
   string   Py::String
   list   Py::List
   dict   Py::Dict
   tuple   Py::Tuple
   file   FILE*
   callable   PyObject*
   instance   PyObject*
   Numeric.array   PyArrayObject*
   wxXXX   wxXXX*
\n
\n

\nThe Py:: namespace is defined by the \nCXX library which has C++ class\nequivalents for many Python types. std:: is the namespace of the\nstandard library in C++.\n

\n\nNote: \n

    \n
  • I haven't figured out how to handle long int yet (I think they are currently converted \n to int - - check this). \n \n
  • \nHopefully VTK will be added to the list soon
  • \n
\n\n

\n\nPython to C++ conversions fill in code in several locations in the generated\ninline extension function. Below is the basic template for the\nfunction. This is actually the exact code that is generated by calling\nweave.inline(\"\").\n\n

\n    static PyObject* compiled_func(PyObject*self, PyObject* args)\n    {\n        PyObject *return_val = NULL;\n        int exception_occured = 0;\n        PyObject *py__locals = NULL;\n        PyObject *py__globals = NULL;\n        PyObject *py_a;\n        py_a = NULL;\n    \n        if(!PyArg_ParseTuple(args,\"OO:compiled_func\",&py__locals,&py__globals))\n            return NULL;\n        try\n        {\n            PyObject* raw_locals = py_to_raw_dict(py__locals,\"_locals\");\n            PyObject* raw_globals = py_to_raw_dict(py__globals,\"_globals\");\n            /* argument conversion code */\n            /* inline code */\n            /*I would like to fill in changed locals and globals here...*/\n    \n        }\n        catch( Py::Exception& e)\n        {\n            return_val =  Py::Null();\n            exception_occured = 1;\n        }\n        /* cleanup code */\n        if(!return_val && !exception_occured)\n        {\n    \n            Py_INCREF(Py_None);\n            return_val = Py_None;\n        }\n    \n        return return_val;\n    }\n    
\n\nThe /* inline code */ section is filled with the code passed to\nthe inline() function call. The \n/*argument conversion code*/ and /* cleanup code */\nsections are filled with code that handles conversion from Python to C++\ntypes and code that deallocates memory or manipulates reference counts before\nthe function returns. The following sections demonstrate how these two areas\nare filled in by the default conversion methods.\n\n \nNote: I'm not sure I have reference counting correct on a few of these. The \nonly thing I increase/decrease the ref count on is Numeric arrays. If you\nsee an issue, please let me know.\n\n\n\n

Numeric Argument Conversion

\n\nInteger, floating point, and complex arguments are handled in a very similar\nfashion. Consider the following inline function that has a single integer \nvariable passed in:\n\n
\n    >>> a = 1\n    >>> inline(\"\",['a'])\n    
\n\nThe argument conversion code inserted for a is:\n\n
\n    /* argument conversion code */\n    int a = py_to_int (get_variable(\"a\",raw_locals,raw_globals),\"a\");\n    
\n\nget_variable() reads the variable a\nfrom the local and global namespaces. py_to_int() has the following\nform:\n\n
\n    static int py_to_int(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyInt_Check(py_obj))\n            handle_bad_type(py_obj,\"int\", name);\n        return (int) PyInt_AsLong(py_obj);\n    }\n    
\n\nSimilarly, the float and complex conversion routines look like:\n\n
    \n    static double py_to_float(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyFloat_Check(py_obj))\n            handle_bad_type(py_obj,\"float\", name);\n        return PyFloat_AsDouble(py_obj);\n    }\n    \n    static std::complex py_to_complex(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyComplex_Check(py_obj))\n            handle_bad_type(py_obj,\"complex\", name);\n        return std::complex(PyComplex_RealAsDouble(py_obj),\n                                    PyComplex_ImagAsDouble(py_obj));    \n    }\n    
\n\nNumeric conversions do not require any clean up code.\n\n\n

String, List, Tuple, and Dictionary Conversion

\n\nStrings, Lists, Tuples and Dictionary conversions are all converted to \nCXX types by default.\n\nFor the following code, \n\n
\n    >>> a = [1]\n    >>> inline(\"\",['a'])\n    
\n\nThe argument conversion code inserted for a is:\n\n
\n    /* argument conversion code */\n    Py::List a = py_to_list (get_variable(\"a\",raw_locals,raw_globals),\"a\");\n    
\n\nget_variable() reads the variable a\nfrom the local and global namespaces. py_to_list() and its\nfriends has the following form:\n\n
    \n    static Py::List py_to_list(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyList_Check(py_obj))\n            handle_bad_type(py_obj,\"list\", name);\n        return Py::List(py_obj);\n    }\n    \n    static Py::String py_to_string(PyObject* py_obj,char* name)\n    {\n        if (!PyString_Check(py_obj))\n            handle_bad_type(py_obj,\"string\", name);\n        return Py::String(py_obj);\n    }\n\n    static Py::Dict py_to_dict(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyDict_Check(py_obj))\n            handle_bad_type(py_obj,\"dict\", name);\n        return Py::Dict(py_obj);\n    }\n    \n    static Py::Tuple py_to_tuple(PyObject* py_obj,char* name)\n    {\n        if (!py_obj || !PyTuple_Check(py_obj))\n            handle_bad_type(py_obj,\"tuple\", name);\n        return Py::Tuple(py_obj);\n    }\n    
\n\nCXX handles reference counts on for strings, lists, tuples, and dictionaries,\nso clean up code isn't necessary.\n\n\n

File Conversion

\n\nFor the following code, \n\n
\n    >>> a = open(\"bob\",'w')  \n    >>> inline(\"\",['a'])\n    
\n\nThe argument conversion code is:\n\n
\n    /* argument conversion code */\n    PyObject* py_a = get_variable(\"a\",raw_locals,raw_globals);\n    FILE* a = py_to_file(py_a,\"a\");\n    
\n\nget_variable() reads the variable a\nfrom the local and global namespaces. py_to_file() converts\nPyObject* to a FILE* and increments the reference count of the PyObject*:\n\n
\n    FILE* py_to_file(PyObject* py_obj, char* name)\n    {\n        if (!py_obj || !PyFile_Check(py_obj))\n            handle_bad_type(py_obj,\"file\", name);\n    \n        Py_INCREF(py_obj);\n        return PyFile_AsFile(py_obj);\n    }\n    
\n\nBecause the PyObject* was incremented, the clean up code needs to decrement\nthe counter\n\n
\n    /* cleanup code */\n    Py_XDECREF(py_a);\n    
\n\nIts important to understand that file conversion only works on actual files --\ni.e. ones created using the open() command in Python. It does\nnot support converting arbitrary objects that support the file interface into\nC FILE* pointers. This can affect many things. For example, in\ninitial printf() examples, one might be tempted to solve the \nproblem of C and Python IDE's (PythonWin, PyCrust, etc.) writing to different\nstdout and stderr by using fprintf() and passing in \nsys.stdout and sys.stderr. For example, instead of\n\n
\n    >>> weave.inline('printf(\"hello\\\\n\");')\n    
\n \nYou might try:\n\n
\n    >>> buf = sys.stdout\n    >>> weave.inline('fprintf(buf,\"hello\\\\n\");',['buf'])\n    
\n\nThis will work as expected from a standard python interpreter, but in PythonWin,\nthe following occurs:\n\n
\n    >>> buf = sys.stdout\n    >>> weave.inline('fprintf(buf,\"hello\\\\n\");',['buf'])\n    Traceback (most recent call last):\n        File \"\", line 1, in ?\n        File \"C:\\Python21\\weave\\inline_tools.py\", line 315, in inline\n            auto_downcast = auto_downcast,\n        File \"C:\\Python21\\weave\\inline_tools.py\", line 386, in compile_function\n            type_factories = type_factories)\n        File \"C:\\Python21\\weave\\ext_tools.py\", line 197, in __init__\n            auto_downcast, type_factories)\n        File \"C:\\Python21\\weave\\ext_tools.py\", line 390, in assign_variable_types\n            raise TypeError, format_error_msg(errors)\n        TypeError: {'buf': \"Unable to convert variable 'buf' to a C++ type.\"}\n    
\n\nThe traceback tells us that inline() was unable to convert 'buf' to a\nC++ type (If instance conversion was implemented, the error would have occurred at \nruntime instead). Why is this? Let's look at what the buf object \nreally is:\n\n
\n    >>> buf\n    pywin.framework.interact.InteractiveView instance at 00EAD014\n    
\n\nPythonWin has reassigned sys.stdout to a special object that \nimplements the Python file interface. This works great in Python, but since \nthe special object doesn't have a FILE* pointer underlying it, fprintf doesn't \nknow what to do with it (well this will be the problem when instance conversion \nis implemented...).\n\n\n

Callable, Instance, and Module Conversion

\n\nNote: Need to look into how ref counts should be handled. Also,\nInstance and Module conversion are not currently implemented.\n\n\n
\n    >>> def a(): \n        pass\n    >>> inline(\"\",['a'])\n    
\n\nCallable and instance variables are converted to PyObject*. Nothing is done\nto their reference counts.\n\n
\n    /* argument conversion code */\n    PyObject* a = py_to_callable(get_variable(\"a\",raw_locals,raw_globals),\"a\");\n    
\n\nget_variable() reads the variable a\nfrom the local and global namespaces. The py_to_callable() and\npy_to_instance() don't currently increment the ref count.\n\n
    \n    PyObject* py_to_callable(PyObject* py_obj, char* name)\n    {\n        if (!py_obj || !PyCallable_Check(py_obj))\n            handle_bad_type(py_obj,\"callable\", name);    \n        return py_obj;\n    }\n\n    PyObject* py_to_instance(PyObject* py_obj, char* name)\n    {\n        if (!py_obj || !PyFile_Check(py_obj))\n            handle_bad_type(py_obj,\"instance\", name);    \n        return py_obj;\n    }\n    
\n \nThere is no cleanup code for callables, modules, or instances.\n\n\n

Customizing Conversions

\n

\nConverting from Python to C++ types is handled by xxx_specification classes. A \ntype specification class actually serve in two related but different \nroles. The first is in determining whether a Python variable that needs to be \nconverted should be represented by the given class. The second is as a code \ngenerator that generate C++ code needed to convert from Python to C++ types for \na specific variable.\n

\nWhen \n\n

\n    >>> a = 1\n    >>> weave.inline('printf(\"%d\",a);',['a'])\n    
\n \nis called for the first time, the code snippet has to be compiled. In this \nprocess, the variable 'a' is tested against a list of type specifications (the \ndefault list is stored in weave/ext_tools.py). The first \nspecification in the list is used to represent the variable. \n\n

\nExamples of xxx_specification are scattered throughout numerous \n\"xxx_spec.py\" files in the weave package. Closely related to \nthe xxx_specification classes are yyy_info classes. \nThese classes contain compiler, header, and support code information necessary \nfor including a certain set of capabilities (such as blitz++ or CXX support)\nin a compiled module. xxx_specification classes have one or more\nyyy_info classes associated with them.\n\nIf you'd like to define your own set of type specifications, the current best route\nis to examine some of the existing spec and info files. Maybe looking over\nsequence_spec.py and cxx_info.py are a good place to start. After defining \nspecification classes, you'll need to pass them into inline using the \ntype_factories argument. \n\nA lot of times you may just want to change how a specific variable type is \nrepresented. Say you'd rather have Python strings converted to \nstd::string or maybe char* instead of using the CXX \nstring object, but would like all other type conversions to have default \nbehavior. This requires that a new specification class that handles strings\nis written and then prepended to a list of the default type specifications. Since\nit is closer to the front of the list, it effectively overrides the default\nstring specification.\n\nThe following code demonstrates how this is done:\n\n...\n\n\n

The Catalog

\n

\ncatalog.py has a class called catalog that helps keep \ntrack of previously compiled functions. This prevents inline() \nand related functions from having to compile functions every time they are \ncalled. Instead, catalog will check an in memory cache to see if the function \nhas already been loaded into python. If it hasn't, then it starts searching \nthrough persistent catalogs on disk to see if it finds an entry for the given \nfunction. By saving information about compiled functions to disk, it isn't\nnecessary to re-compile functions every time you stop and restart the interpreter.\nFunctions are compiled once and stored for future use.\n\n

\nWhen inline(cpp_code) is called the following things happen:\n

    \n
  1. \n A fast local cache of functions is checked for the last function called for \n cpp_code. If an entry for cpp_code doesn't exist in the \n cache or the cached function call fails (perhaps because the function doesn't \n have compatible types) then the next step is to check the catalog. \n
  2. \n The catalog class also keeps an in-memory cache with a list of all the \n functions compiled for cpp_code. If cpp_code has\n ever been called, then this cache will be present (loaded from disk). If\n the cache isn't present, then it is loaded from disk.\n

    \n If the cache is present, each function in the cache is \n called until one is found that was compiled for the correct argument types. If \n none of the functions work, a new function is compiled with the given argument \n types. This function is written to the on-disk catalog as well as into the \n in-memory cache.

    \n
  3. \n When a lookup for cpp_code fails, the catalog looks through \n the on-disk function catalogs for the entries. The PYTHONCOMPILED variable \n determines where to search for these catalogs and in what order. If \n PYTHONCOMPILED is not present several platform dependent locations are \n searched. All functions found for cpp_code in the path are \n loaded into the in-memory cache with functions found earlier in the search \n path closer to the front of the call list.\n

    \n If the function isn't found in the on-disk catalog, \n then the function is compiled, written to the first writable directory in the \n PYTHONCOMPILED path, and also loaded into the in-memory cache.

    \n
  4. \n
\n\n\n

Function Storage: How functions are stored in caches and on disk

\n

\nFunction caches are stored as dictionaries where the key is the entire C++\ncode string and the value is either a single function (as in the "level 1"\ncache) or a list of functions (as in the main catalog cache). On disk\ncatalogs are stored in the same manner using standard Python shelves.\n

\nEarly on, there was a question as to whether md5 check sums of the C++\ncode strings should be used instead of the actual code strings. I think this\nis the route inline Perl took. Some (admittedly quick) tests of the md5 vs.\nthe entire string showed that using the entire string was at least a\nfactor of 3 or 4 faster for Python. I think this is because it is more\ntime consuming to compute the md5 value than it is to do look-ups of long\nstrings in the dictionary. Look at the examples/md5_speed.py file for the\ntest run. \n\n\n

Catalog search paths and the PYTHONCOMPILED variable

\n

\nThe default location for catalog files on Unix is ~/.pythonXX_compiled where \nXX is the version of Python being used. If this directory doesn't exist, it is \ncreated the first time a catalog is used. The directory must be writable. If, \nfor any reason it isn't, then the catalog attempts to create a directory based \non your user id in the /tmp directory. The directory permissions are set so \nthat only you have access to the directory. If this fails, I think you're out of \nluck. I don't think either of these should ever fail though. On Windows, a \ndirectory called pythonXX_compiled is created in the user's temporary \ndirectory. \n

\nThe actual catalog file that lives in this directory is a Python shelve with\na platform specific name such as \"nt21compiled_catalog\" so that multiple OSes\ncan share the same file systems without trampling on each other. Along with\nthe catalog file, the .cpp and .so or .pyd files created by inline will live\nin this directory. The catalog file simply contains keys which are the C++\ncode strings with values that are lists of functions. The function lists point\nat functions within these compiled modules. Each function in the lists \nexecutes the same C++ code string, but compiled for different input variables.\n

\nYou can use the PYTHONCOMPILED environment variable to specify alternative\nlocations for compiled functions. On Unix this is a colon (':') separated\nlist of directories. On Windows, it is a (';') separated list of directories.\nThese directories will be searched prior to the default directory for a\ncompiled function catalog. Also, the first writable directory in the list\nis where all new compiled function catalogs, .cpp and .so or .pyd files are\nwritten. Relative directory paths ('.' and '..') should work fine in the\nPYTHONCOMPILED variable as should environment variables.\n

\nThere is a \"special\" path variable called MODULE that can be placed in the \nPYTHONCOMPILED variable. It specifies that the compiled catalog should\nreside in the same directory as the module that called it. This is useful\nif an admin wants to build a lot of compiled functions during the build\nof a package and then install them in site-packages along with the package.\nUser's who specify MODULE in their PYTHONCOMPILED variable will have access\nto these compiled functions. Note, however, that if they call the function\nwith a set of argument types that it hasn't previously been built for, the\nnew function will be stored in their default directory (or some other writable\ndirectory in the PYTHONCOMPILED path) because the user will not have write\naccess to the site-packages directory.\n

\nAn example of using the PYTHONCOMPILED path on bash follows:\n\n

\n    PYTHONCOMPILED=MODULE:/some/path;export PYTHONCOMPILED;\n    
\n\nIf you are using python21 on linux, and the module bob.py in site-packages\nhas a compiled function in it, then the catalog search order when calling that\nfunction for the first time in a python session would be:\n\n
\n    /usr/lib/python21/site-packages/linuxpython_compiled\n    /some/path/linuxpython_compiled\n    ~/.python21_compiled/linuxpython_compiled\n    
\n\nThe default location is always included in the search path.\n

\n \nNote: hmmm. see a possible problem here. I should probably make a sub-\ndirectory such as /usr/lib/python21/site-\npackages/python21_compiled/linuxpython_compiled so that library files compiled \nwith python21 are tried to link with python22 files in some strange scenarios. \nNeed to check this.\n\n\n

\nThe in-module cache (in weave.inline_tools) reduces the overhead \nof calling inline functions by about a factor of 2. It can be reduced a little \nmore for tight loop calls where the same function is called over and over again \nif the cache was a single value instead of a dictionary, but the benefit is \nvery small (less than 5%) and the utility is quite a bit less. So, we'll stick \nwith a dictionary as the cache.\n

\n\n\n

Blitz

\n Note: most of this section is lifted from old documentation. It should be\npretty accurate, but there may be a few discrepancies.\n

\nweave.blitz() compiles Numeric Python expressions for fast \nexecution. For most applications, compiled expressions should provide a \nfactor of 2-10 speed-up over Numeric arrays. Using compiled \nexpressions is meant to be as unobtrusive as possible and works much like \npythons exec statement. As an example, the following code fragment takes a 5 \npoint average of the 512x512 2d image, b, and stores it in array, a:\n\n

\n    from scipy import *  # or from Numeric import *\n    a = ones((512,512), Float64) \n    b = ones((512,512), Float64) \n    # ...do some stuff to fill in b...\n    # now average\n    a[1:-1,1:-1] =  (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1] \\\n                   + b[1:-1,2:] + b[1:-1,:-2]) / 5.\n    
\n \nTo compile the expression, convert the expression to a string by putting\nquotes around it and then use weave.blitz:\n\n
\n    import weave\n    expr = \"a[1:-1,1:-1] =  (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1]\" \\\n                          \"+ b[1:-1,2:] + b[1:-1,:-2]) / 5.\"\n    weave.blitz(expr)\n    
\n\nThe first time weave.blitz is run for a given expression and \nset of arguments, C++ code that accomplishes the exact same task as the Python \nexpression is generated and compiled to an extension module. This can take up \nto a couple of minutes depending on the complexity of the function. Subsequent \ncalls to the function are very fast. Further, the generated module is saved \nbetween program executions so that the compilation is only done once for a \ngiven expression and associated set of array types. If the given expression\nis executed with a new set of array types, the code must be compiled again. This\ndoes not overwrite the previously compiled function -- both of them are saved and\navailable for execution. \n

\nThe following table compares the run times for standard Numeric code and \ncompiled code for the 5 point averaging.\n

\n

\n\n\n\n\n\n
Method Run Time (seconds)
Standard Numeric 0.46349
blitz (1st time compiling) 78.95526
blitz (subsequent calls) 0.05843 (factor of 8 speedup)
\n
\n

\nThese numbers are for a 512x512 double precision image run on a 400 MHz Celeron \nprocessor under RedHat Linux 6.2.\n

\nBecause of the slow compile times, its probably most effective to develop \nalgorithms as you usually do using the capabilities of scipy or the Numeric \nmodule. Once the algorithm is perfected, put quotes around it and execute it \nusing weave.blitz. This provides the standard rapid \nprototyping strengths of Python and results in algorithms that run close to \nthat of hand coded C or Fortran.\n\n\n

Requirements

\n\nCurrently, the weave.blitz has only been tested under Linux \nwith gcc-2.95-3 and on Windows with Mingw32 (2.95.2). Its compiler \nrequirements are pretty heavy duty (see the \nblitz++ home page), so it won't \nwork with just any compiler. Particularly MSVC++ isn't up to snuff. A number \nof other compilers such as KAI++ will also work, but my suspicions are that gcc \nwill get the most use.\n\n\n

Limitations

\n
    \n
  1. \nCurrently, weave.blitz handles all standard mathematical \noperators except for the ** power operator. The built-in trigonometric, log, \nfloor/ceil, and fabs functions might work (but haven't been tested). It also \nhandles all types of array indexing supported by the Numeric module. \n

    \nweave.blitz does not currently support operations that use \narray broadcasting, nor have any of the special purpose functions in Numeric \nsuch as take, compress, etc. been implemented. Note that there are no obvious \nreasons why most of this functionality cannot be added to scipy.weave, so it \nwill likely trickle into future versions. Using slice() objects \ndirectly instead of start:stop:step is also not supported.\n

  2. \n
  3. \nCurrently Python only works on expressions that include assignment such as\n \n
    \n    >>> result = b + c + d\n    
    \n\nThis means that the result array must exist before calling \nweave.blitz. Future versions will allow the following:\n\n
    \n    >>> result = weave.blitz_eval(\"b + c + d\")\n    
    \n
  4. \n
  5. \nweave.blitz works best when algorithms can be expressed in a \n\"vectorized\" form. Algorithms that have a large number of if/thens and other \nconditions are better hand written in C or Fortran. Further, the restrictions \nimposed by requiring vectorized expressions sometimes preclude the use of more \nefficient data structures or algorithms. For maximum speed in these cases, \nhand-coded C or Fortran code is the only way to go.\n
  6. \n
  7. \nweave.blitz can produce different results than Numeric in certain \nsituations. It can happen when the array receiving the results of a \ncalculation is also used during the calculation. The Numeric behavior is to \ncarry out the entire calculation on the right hand side of an equation and \nstore it in a temporary array. This temporary array is assigned to the array \non the left hand side of the equation. blitz, on the other hand, does a \n"running" calculation of the array elements assigning values from the right hand\nside to the elements on the left hand side immediately after they are calculated.\nHere is an example, provided by Prabhu Ramachandran, where this happens:\n\n
    \n        # 4 point average.\n        >>> expr = \"u[1:-1, 1:-1] = (u[0:-2, 1:-1] + u[2:, 1:-1] + \"\\\n        ...                \"u[1:-1,0:-2] + u[1:-1, 2:])*0.25\"\n        >>> u = zeros((5, 5), 'd'); u[0,:] = 100\n        >>> exec (expr)\n        >>> u\n        array([[ 100.,  100.,  100.,  100.,  100.],\n               [   0.,   25.,   25.,   25.,    0.],\n               [   0.,    0.,    0.,    0.,    0.],\n               [   0.,    0.,    0.,    0.,    0.],\n               [   0.,    0.,    0.,    0.,    0.]])\n        \n        >>> u = zeros((5, 5), 'd'); u[0,:] = 100\n        >>> weave.blitz (expr)\n        >>> u\n        array([[ 100.  ,  100.       ,  100.       ,  100.       ,  100. ],\n               [   0.  ,   25.       ,   31.25     ,   32.8125   ,    0. ],\n               [   0.  ,    6.25     ,    9.375    ,   10.546875 ,    0. ],\n               [   0.  ,    1.5625   ,    2.734375 ,    3.3203125,    0. ],\n               [   0.  ,    0.       ,    0.       ,    0.       ,    0. ]])    \n        
    \n \n You can prevent this behavior by using a temporary array.\n \n
    \n        >>> u = zeros((5, 5), 'd'); u[0,:] = 100\n        >>> temp = zeros((4, 4), 'd');\n        >>> expr = \"temp = (u[0:-2, 1:-1] + u[2:, 1:-1] + \"\\\n        ...        \"u[1:-1,0:-2] + u[1:-1, 2:])*0.25;\"\\\n        ...        \"u[1:-1,1:-1] = temp\"\n        >>> weave.blitz (expr)\n        >>> u\n        array([[ 100.,  100.,  100.,  100.,  100.],\n               [   0.,   25.,   25.,   25.,    0.],\n               [   0.,    0.,    0.,    0.,    0.],\n               [   0.,    0.,    0.,    0.,    0.],\n               [   0.,    0.,    0.,    0.,    0.]])\n        
    \n \n
  8. \n
  9. \nOne other point deserves mention lest people be confused. \nweave.blitz is not a general purpose Python->C compiler. It \nonly works for expressions that contain Numeric arrays and/or \nPython scalar values. This focused scope concentrates effort on the \ncomputationally intensive regions of the program and sidesteps the difficult \nissues associated with a general purpose Python->C compiler.\n
  10. \n
\n\n\n

Numeric efficiency issues: What compilation buys you

\n\nSome might wonder why compiling Numeric expressions to C++ is beneficial since \noperations on Numeric array operations are already executed within C loops. \nThe problem is that anything other than the simplest expression are executed in \nless than optimal fashion. Consider the following Numeric expression:\n\n
\n    a = 1.2 * b + c * d\n    
\n \nWhen Numeric calculates the value for the 2d array, a, it does \nthe following steps:\n\n
\n    temp1 = 1.2 * b\n    temp2 = c * d\n    a = temp1 + temp2\n    
\n \nTwo things to note. Since c is an (perhaps large) array, a large \ntemporary array must be created to store the results of 1.2 * b. \nThe same is true for temp2. Allocation is slow. The second thing \nis that we have 3 loops executing, one to calculate temp1, one for \ntemp2 and one for adding them up. A C loop for the same problem \nmight look like:\n\n
\n    for(int i = 0; i < M; i++)\n        for(int j = 0; j < N; j++)\n            a[i,j] = 1.2 * b[i,j] + c[i,j] * d[i,j]\n    
\n \nHere, the 3 loops have been fused into a single loop and there is no longer\na need for a temporary array. This provides a significant speed improvement\nover the above example (write me and tell me what you get). \n

\nSo, converting Numeric expressions into C/C++ loops that fuse the loops and \neliminate temporary arrays can provide big gains. The goal then,is to convert \nNumeric expression to C/C++ loops, compile them in an extension module, and \nthen call the compiled extension function. The good news is that there is an \nobvious correspondence between the Numeric expression above and the C loop. The \nbad news is that Numeric is generally much more powerful than this simple \nexample illustrates and handling all possible indexing possibilities results in \nloops that are less than straight forward to write. (take a peak in Numeric for \nconfirmation). Luckily, there are several available tools that simplify the \nprocess.\n\n\n

The Tools

\n\nweave.blitz relies heavily on several remarkable tools. On the \nPython side, the main facilitators are Jeremy Hylton's parser module and Jim \nHugunin's Numeric module. On the compiled language side, Todd Veldhuizen's \nblitz++ array library, written in C++ (shhhh. don't tell David Beazley), does \nthe heavy lifting. Don't assume that, because it's C++, it's much slower than C \nor Fortran. Blitz++ uses a jaw dropping array of template techniques \n(metaprogramming, template expression, etc) to convert innocent looking and \nreadable C++ expressions into code that usually executes within a few \npercentage points of Fortran code for the same problem. This is good. \nUnfortunately all the template raz-ma-taz is very expensive to compile, so the \n200 line extension modules often take 2 or more minutes to compile. This isn't so \ngood. weave.blitz works to minimize this issue by remembering \nwhere compiled modules live and reusing them instead of re-compiling every time \na program is re-run.\n\n

Parser

\nTearing Numeric expressions apart, examining the pieces, and then rebuilding \nthem as C++ (blitz) expressions requires a parser of some sort. I can imagine \nsomeone attacking this problem with regular expressions, but it'd likely be \nugly and fragile. Amazingly, Python solves this problem for us. It actually \nexposes its parsing engine to the world through the parser module. \nThe following fragment creates an Abstract Syntax Tree (AST) object for the \nexpression and then converts to a (rather unpleasant looking) deeply nested list \nrepresentation of the tree. \n \n
\n    >>> import parser\n    >>> import scipy.weave.misc\n    >>> ast = parser.suite(\"a = b * c + d\")\n    >>> ast_list = ast.tolist()\n    >>> sym_list = scipy.weave.misc.translate_symbols(ast_list)\n    >>> pprint.pprint(sym_list)\n    ['file_input',\n     ['stmt',\n      ['simple_stmt',\n       ['small_stmt',\n        ['expr_stmt',\n         ['testlist',\n          ['test',\n           ['and_test',\n            ['not_test',\n             ['comparison',\n              ['expr',\n               ['xor_expr',\n                ['and_expr',\n                 ['shift_expr',\n                  ['arith_expr',\n                   ['term',\n                    ['factor', ['power', ['atom', ['NAME', 'a']]]]]]]]]]]]]]],\n         ['EQUAL', '='],\n         ['testlist',\n          ['test',\n           ['and_test',\n            ['not_test',\n             ['comparison',\n              ['expr',\n               ['xor_expr',\n                ['and_expr',\n                 ['shift_expr',\n                  ['arith_expr',\n                   ['term',\n                    ['factor', ['power', ['atom', ['NAME', 'b']]]],\n                    ['STAR', '*'],\n                    ['factor', ['power', ['atom', ['NAME', 'c']]]]],\n                   ['PLUS', '+'],\n                   ['term',\n                    ['factor', ['power', ['atom', ['NAME', 'd']]]]]]]]]]]]]]]]],\n       ['NEWLINE', '']]],\n     ['ENDMARKER', '']]\n    
\n\nDespite its looks, with some tools developed by Jeremy H., it's possible\nto search these trees for specific patterns (sub-trees), extract the \nsub-tree, manipulate it, converting python specific code fragments\nto blitz code fragments, and then re-insert it in the parse tree. The parser\nmodule documentation has some details on how to do this. Traversing the \nnew blitzified tree, writing out the terminal symbols as you go, creates\nour new blitz++ expression string.\n\n \n

Blitz and Numeric

\nThe other nice discovery in the project is that the data structure used\nfor Numeric arrays and blitz arrays is nearly identical. Numeric stores\n\"strides\" as byte offsets and blitz stores them as element offsets, but\nother than that, they are the same. Further, most of the concepts and\ncapabilities of the two libraries are remarkably similar. It is satisfying \nthat two completely different implementations solved the problem with \nsimilar basic architectures. It is also fortuitous. The work involved in \nconverting Numeric expressions to blitz expressions was greatly diminished.\nAs an example, consider the code for slicing an array in Python with a\nstride:\n\n
\n    >>> a = b[0:4:2] + c\n    >>> a\n    [0,2,4]\n    
\n\n\nIn Blitz it is as follows:\n\n
\n    Array<2,int> b(10);\n    Array<2,int> c(3);\n    // ...\n    Array<2,int> a = b(Range(0,3,2)) + c;\n    
\n\n\nHere the range object works exactly like Python slice objects with the exception\nthat the top index (3) is inclusive whereas Python's (4) is exclusive. Other \ndifferences include the type declarations in C++ and parentheses instead of \nbrackets for indexing arrays. Currently, weave.blitz handles the \ninclusive/exclusive issue by subtracting one from upper indices during the\ntranslation. An alternative that is likely more robust/maintainable in the \nlong run, is to write a PyRange class that behaves like Python's range. \nThis is likely very easy.\n

\nThe stock blitz also doesn't handle negative indices in ranges. The current \nimplementation of the blitz() has a partial solution to this \nproblem. It calculates an index that starts with a '-' sign by subtracting it \nfrom the maximum index in the array so that:\n\n

\n                    upper index limit\n                        /-----\\\n    b[:-1] -> b(Range(0,Nb[0]-1-1))\n    
\n\nThis approach fails, however, when the top index is calculated from other \nvalues. In the following scenario, if i-j evaluates to a negative \nvalue, the compiled code will produce incorrect results and could even core-\ndump. Right now, all calculated indices are assumed to be positive.\n \n
\n    b[:i-j] -> b(Range(0,i-j))\n    
\n\nA solution is to calculate all indices up front using if/then to handle the\n+/- cases. This is a little work and results in more code, so it hasn't been\ndone. I'm holding out to see if blitz++ can be modified to handle negative\nindexing, but haven't looked into how much effort is involved yet. While it \nneeds fixin', I don't think there is a ton of code where this is an issue.\n

\nThe actual translation of the Python expressions to blitz expressions is \ncurrently a two part process. First, all x:y:z slicing expressions are removed\nfrom the AST, converted to slice(x,y,z) and re-inserted into the tree. Any\nmath needed on these expressions (subtracting from the \nmaximum index, etc.) is also performed here. _beg and _end are used as special\nvariables that are defined as blitz::fromBegin and blitz::toEnd.\n\n

\n    a[i+j:i+j+1,:] = b[2:3,:] \n    
\n\nbecomes a more verbose:\n \n
\n    a[slice(i+j,i+j+1),slice(_beg,_end)] = b[slice(2,3),slice(_beg,_end)]\n    
\n \nThe second part does a simple string search/replace to convert to a blitz \nexpression with the following translations:\n\n
\n    slice(_beg,_end) -> _all  # not strictly needed, but cuts down on code.\n    slice            -> blitz::Range\n    [                -> (\n    ]                -> )\n    _stp             -> 1\n    
\n\n_all is defined in the compiled function as \nblitz::Range.all(). These translations could of course happen \ndirectly in the syntax tree. But the string replacement is slightly easier. \nNote that name spaces are maintained in the C++ code to lessen the likelihood \nof name clashes. Currently no effort is made to detect name clashes. A good \nrule of thumb is don't use values that start with '_' or 'py_' in compiled \nexpressions and you'll be fine.\n\n \n

Type definitions and coercion

\n\nSo far we've glossed over the dynamic vs. static typing issue between Python \nand C++. In Python, the type of value that a variable holds can change\nthrough the course of program execution. C/C++, on the other hand, forces you\nto declare the type of value a variable will hold at compile time.\nweave.blitz handles this issue by examining the types of the\nvariables in the expression being executed, and compiling a function for those\nexplicit types. For example:\n\n
\n    a = ones((5,5),Float32)\n    b = ones((5,5),Float32)\n    weave.blitz(\"a = a + b\")\n    
\n\nWhen compiling this expression to C++, weave.blitz sees that the\nvalues for a and b in the local scope have type Float32, or 'float'\non a 32 bit architecture. As a result, it compiles the function using \nthe float type (no attempt has been made to deal with 64 bit issues).\nIt also goes one step further. If all arrays have the same type, a templated\nversion of the function is made and instantiated for float, double, \ncomplex, and complex arrays. Note: This feature has been \nremoved from the current version of the code. Each version will be compiled\nseparately \n

\nWhat happens if you call a compiled function with array types that are \ndifferent than the ones for which it was originally compiled? No biggie, you'll \njust have to wait on it to compile a new version for your new types. This \ndoesn't overwrite the old functions, as they are still accessible. See the \ncatalog section in the inline() documentation to see how this is handled. \nSuffice to say, the mechanism is transparent to the user and behaves \nlike dynamic typing with the occasional wait for compiling newly typed \nfunctions.\n

\nWhen working with combined scalar/array operations, the type of the array is \nalways used. This is similar to the savespace flag that was recently \nadded to Numeric. This prevents issues with the following expression perhaps \nunexpectedly being calculated at a higher (more expensive) precision that can \noccur in Python:\n\n

\n    >>> a = array((1,2,3),typecode = Float32)\n    >>> b = a * 2.1 # results in b being a Float64 array.\n    
\n \nIn this example, \n\n
\n    >>> a = ones((5,5),Float32)\n    >>> b = ones((5,5),Float32)\n    >>> weave.blitz(\"b = a * 2.1\")\n    
\n \nthe 2.1 is cast down to a float before carrying out \nthe operation. If you really want to force the calculation to be a \ndouble, define a and b as \ndouble arrays.\n

\nOne other point of note. Currently, you must include both the right hand side \nand left hand side (assignment side) of your equation in the compiled \nexpression. Also, the array being assigned to must be created prior to calling \nweave.blitz. I'm pretty sure this is easily changed so that a \ncompiled_eval expression can be defined, but no effort has been made to \nallocate new arrays (and discern their type) on the fly.\n\n \n

Cataloging Compiled Functions

\n\nSee the Cataloging functions section in the \nweave.inline() documentation.\n\n \n

Checking Array Sizes

\n\nSurprisingly, one of the big initial problems with compiled code was making\nsure all the arrays in an operation were of compatible size. The following\ncase is trivially easy:\n\n
\n    a = b + c\n    
\n \nIt only requires that arrays a, b, and c \nhave the same shape. However, expressions like:\n\n
\n    a[i+j:i+j+1,:] = b[2:3,:] + c\n    
\n\nare not so trivial. Since slicing is involved, the size of the slices, not the \ninput arrays must be checked. Broadcasting complicates things further because \narrays and slices with different dimensions and shapes may be compatible for \nmath operations (broadcasting isn't yet supported by \nweave.blitz). Reductions have a similar effect as their \nresults are different shapes than their input operand. The binary operators in \nNumeric compare the shapes of their two operands just before they operate on \nthem. This is possible because Numeric treats each operation independently. \nThe intermediate (temporary) arrays created during sub-operations in an \nexpression are tested for the correct shape before they are combined by another \noperation. Because weave.blitz fuses all operations into a \nsingle loop, this isn't possible. The shape comparisons must be done and \nguaranteed compatible before evaluating the expression.\n

\nThe solution chosen converts input arrays to \"dummy arrays\" that only represent \nthe dimensions of the arrays, not the data. Binary operations on dummy arrays \ncheck that input array sizes are compatible and return a dummy array with the \ncorrect size. Evaluating an expression of dummy arrays traces the \nchanging array sizes through all operations and fails if incompatible array \nsizes are ever found. \n

\nThe machinery for this is housed in weave.size_check. It \nbasically involves writing a new class (dummy array) and overloading its math \noperators to calculate the new sizes correctly. All the code is in Python and \nthere is a fair amount of logic (mainly to handle indexing and slicing) so the \noperation does impose some overhead. For large arrays (ie. 50x50x50), the \noverhead is negligible compared to evaluating the actual expression. For small \narrays (ie. 16x16), the overhead imposed for checking the shapes with this \nmethod can cause the weave.blitz to be slower than evaluating \nthe expression in Python. \n

\nWhat can be done to reduce the overhead? (1) The size checking code could be \nmoved into C. This would likely remove most of the overhead penalty compared \nto Numeric (although there is also some calling overhead), but no effort has \nbeen made to do this. (2) You can also call weave.blitz with\ncheck_size=0 and the size checking isn't done. However, if the \nsizes aren't compatible, it can cause a core-dump. So, foregoing size_checking\nisn't advisable until your code is well debugged.\n\n \n

Creating the Extension Module

\n\nweave.blitz uses the same machinery as \nweave.inline to build the extension module. The only difference\nis the code included in the function is automatically generated from the\nNumeric array expression instead of supplied by the user.\n\n\n

Extension Modules

\nweave.inline and weave.blitz are high level tools\nthat generate extension modules automatically. Under the covers, they use several\nclasses from weave.ext_tools to help generate the extension module.\nThe main two classes are ext_module and ext_function (I'd\nlike to add ext_class and ext_method also). These classes\nsimplify the process of generating extension modules by handling most of the \"boiler\nplate\" code automatically.\n\n\nNote: inline actually sub-classes weave.ext_tools.ext_function \nto generate slightly different code than the standard ext_function.\nThe main difference is that the standard class converts function arguments to\nC types, while inline always has two arguments, the local and global dicts, and\nit grabs the variables that need to be converted to C from these.\n\n\n\n

A Simple Example

\nThe following simple example demonstrates how to build an extension module within\na Python function:\n\n
\n    # examples/increment_example.py\n    from weave import ext_tools\n    \n    def build_increment_ext():\n        \"\"\" Build a simple extension with functions that increment numbers.\n            The extension will be built in the local directory.\n        \"\"\"        \n        mod = ext_tools.ext_module('increment_ext')\n    \n        a = 1 # effectively a type declaration for 'a' in the \n              # following functions.\n    \n        ext_code = \"return_val = Py::new_reference_to(Py::Int(a+1));\"    \n        func = ext_tools.ext_function('increment',ext_code,['a'])\n        mod.add_function(func)\n        \n        ext_code = \"return_val = Py::new_reference_to(Py::Int(a+2));\"    \n        func = ext_tools.ext_function('increment_by_2',ext_code,['a'])\n        mod.add_function(func)\n                \n        mod.compile()\n    
\n\n\nThe function build_increment_ext() creates an extension module \nnamed increment_ext and compiles it to a shared library (.so or \n.pyd) that can be loaded into Python. increment_ext contains two \nfunctions, increment and increment_by_2. \n\nThe first line of build_increment_ext(),\n\n
\n        mod = ext_tools.ext_module('increment_ext') \n    
\n\ncreates an ext_module instance that is ready to have \next_function instances added to it. ext_function \ninstances are created with a calling convention similar to \nweave.inline(). The most common call includes a C/C++ code \nsnippet and a list of the arguments for the function. The following\n\n
\n        ext_code = \"return_val = Py::new_reference_to(Py::Int(a+1));\"    \n        func = ext_tools.ext_function('increment',ext_code,['a'])\n    
\n \ncreates a C/C++ extension function that is equivalent to the following Python\nfunction:\n\n
\n        def increment(a):\n            return a + 1\n    
\n\nA second method is also added to the module and then,\n\n
\n        mod.compile()\n    
\n\nis called to build the extension module. By default, the module is created\nin the current working directory.\n\nThis example is available in the examples/increment_example.py file\nfound in the weave directory. At the bottom of the file in the\nmodule's \"main\" program, an attempt to import increment_ext without\nbuilding it is made. If this fails (the module doesn't exist in the PYTHONPATH), \nthe module is built by calling build_increment_ext(). This approach\nonly takes the time consuming (a few seconds for this example) process of building\nthe module if it hasn't been built before.\n\n
\n    if __name__ == \"__main__\":\n        try:\n            import increment_ext\n        except ImportError:\n            build_increment_ext()\n            import increment_ext\n        a = 1\n        print 'a, a+1:', a, increment_ext.increment(a)\n        print 'a, a+2:', a, increment_ext.increment_by_2(a)           \n    
\n\n\nNote: If we were willing to always pay the penalty of building the C++ code for \na module, we could store the md5 checksum of the C++ code along with some \ninformation about the compiler, platform, etc. Then, \next_module.compile() could try importing the module before it actually\ncompiles it, check the md5 checksum and other meta-data in the imported module\nwith the meta-data of the code it just produced and only compile the code if\nthe module didn't exist or the meta-data didn't match. This would reduce the\nabove code to:\n\n
\n    if __name__ == \"__main__\":\n        build_increment_ext()\n\n        a = 1\n        print 'a, a+1:', a, increment_ext.increment(a)\n        print 'a, a+2:', a, increment_ext.increment_by_2(a)           \n    
\n\nNote: There would always be the overhead of building the C++ code, but it would only actually compile the code once. You pay a little in overhead and get cleaner\n\"import\" code. Needs some thought.\n\n

\n\nIf you run increment_example.py from the command line, you get\nthe following:\n\n

\n    [eric@n0]$ python increment_example.py\n    a, a+1: 1 2\n    a, a+2: 1 3\n    
\n\nIf the module didn't exist before it was run, the module is created. If it did\nexist, it is just imported and used.\n\n\n

Fibonacci Example

\nexamples/fibonacci.py provides a little more complex example of \nhow to use ext_tools. Fibonacci numbers are a series of numbers \nwhere each number in the series is the sum of the previous two: 1, 1, 2, 3, 5, \n8, etc. Here, the first two numbers in the series are taken to be 1. One \napproach to calculating Fibonacci numbers uses recursive function calls. In \nPython, it might be written as:\n\n
\n    def fib(a):\n        if a <= 2:\n            return 1\n        else:\n            return fib(a-2) + fib(a-1)\n    
\n\nIn C, the same function would look something like this:\n\n
\n     int fib(int a)\n     {                   \n         if(a <= 2)\n             return 1;\n         else\n             return fib(a-2) + fib(a-1);  \n     }                      \n    
\n\nRecursion is much faster in C than in Python, so it would be beneficial\nto use the C version for fibonacci number calculations instead of the\nPython version. We need an extension function that calls this C function\nto do this. This is possible by including the above code snippet as \n\"support code\" and then calling it from the extension function. Support \ncode snippets (usually structure definitions, helper functions and the like)\nare inserted into the extension module C/C++ file before the extension\nfunction code. Here is how to build the C version of the fibonacci number\ngenerator:\n\n
\ndef build_fibonacci():\n    \"\"\" Builds an extension module with fibonacci calculators.\n    \"\"\"\n    mod = ext_tools.ext_module('fibonacci_ext')\n    a = 1 # this is effectively a type declaration\n    \n    # recursive fibonacci in C \n    fib_code = \"\"\"\n                   int fib1(int a)\n                   {                   \n                       if(a <= 2)\n                           return 1;\n                       else\n                           return fib1(a-2) + fib1(a-1);  \n                   }                         \n               \"\"\"\n    ext_code = \"\"\"\n                   int val = fib1(a);\n                   return_val = Py::new_reference_to(Py::Int(val));\n               \"\"\"    \n    fib = ext_tools.ext_function('fib',ext_code,['a'])\n    fib.customize.add_support_code(fib_code)\n    mod.add_function(fib)\n\n    mod.compile()\n\n    
\n\nXXX More about custom_info, and what xxx_info instances are good for.\n\n

\n\nNote: recursion is not the fastest way to calculate fibonacci numbers, but this \napproach serves nicely for this example.\n\n

\n\n

Customizing Type Conversions -- Type Factories

\nnot written\n\n

Things I wish weave did

\n\nIt is possible to get name clashes if you use a variable name that is already defined\nin a header automatically included (such as stdio.h) For instance, if you\ntry to pass in a variable named stdout, you'll get a cryptic error report\ndue to the fact that stdio.h also defines the name. weave\nshould probably try and handle this in some way.\n\nOther things...