ngram
listlengths
0
82k
[ "WorkerPool # Public identifiers that require documentation. __all__ = (\"compute_hashes\",", "Author: <NAME> <<EMAIL>> # Last Change: March 6, 2020 #", "__all__ = (\"compute_hashes\", \"hash_worker\") def compute_hashes(filename, block_size, method, concurrency): \"\"\"Compute", "pool: for offset, digest in pool: yield offset, digest def", "by rsync. # # Author: <NAME> <<EMAIL>> # Last Change:", ") as pool: for offset, digest in pool: yield offset,", "def hash_worker(offset, block_size, filename, method): \"\"\"Worker function to be run", ":mod:`multiprocessing` and :mod:`pdiffcopy.mp`.\"\"\" # Standard library modules. import functools import", "offset, digest in pool: yield offset, digest def hash_worker(offset, block_size,", "with open(filename, \"rb\") as handle: handle.seek(offset) context = hashlib.new(method) context.update(handle.read(block_size))", "our package. from pdiffcopy.mp import WorkerPool # Public identifiers that", "# Public identifiers that require documentation. __all__ = (\"compute_hashes\", \"hash_worker\")", "filename, method): \"\"\"Worker function to be run in child processes.\"\"\"", "checksums of a file in blocks (parallel).\"\"\" with WorkerPool( concurrency=concurrency,", "<NAME> <<EMAIL>> # Last Change: March 6, 2020 # URL:", "block_size), worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method), ) as pool: for offset,", "synchronization inspired by rsync. 
# # Author: <NAME> <<EMAIL>> #", "be run in child processes.\"\"\" with open(filename, \"rb\") as handle:", "concurrency): \"\"\"Compute checksums of a file in blocks (parallel).\"\"\" with", "from pdiffcopy.mp import WorkerPool # Public identifiers that require documentation.", "open(filename, \"rb\") as handle: handle.seek(offset) context = hashlib.new(method) context.update(handle.read(block_size)) return", "block_size, method, concurrency): \"\"\"Compute checksums of a file in blocks", "yield offset, digest def hash_worker(offset, block_size, filename, method): \"\"\"Worker function", "Change: March 6, 2020 # URL: https://pdiffcopy.readthedocs.io \"\"\"Parallel hashing of", "and :mod:`pdiffcopy.mp`.\"\"\" # Standard library modules. import functools import hashlib", "blocks (parallel).\"\"\" with WorkerPool( concurrency=concurrency, generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size), worker_fn=functools.partial(hash_worker,", "digest in pool: yield offset, digest def hash_worker(offset, block_size, filename,", "Fast large file synchronization inspired by rsync. # # Author:", "2020 # URL: https://pdiffcopy.readthedocs.io \"\"\"Parallel hashing of files using :mod:`multiprocessing`", "block_size, filename, method): \"\"\"Worker function to be run in child", "# Modules included in our package. from pdiffcopy.mp import WorkerPool", "= (\"compute_hashes\", \"hash_worker\") def compute_hashes(filename, block_size, method, concurrency): \"\"\"Compute checksums", "hash_worker(offset, block_size, filename, method): \"\"\"Worker function to be run in", "# Standard library modules. 
import functools import hashlib import os", "child processes.\"\"\" with open(filename, \"rb\") as handle: handle.seek(offset) context =", "for offset, digest in pool: yield offset, digest def hash_worker(offset,", "# Author: <NAME> <<EMAIL>> # Last Change: March 6, 2020", "os.path.getsize(filename), block_size), worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method), ) as pool: for", "dependencies. from six.moves import range # Modules included in our", "worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method), ) as pool: for offset, digest", "(\"compute_hashes\", \"hash_worker\") def compute_hashes(filename, block_size, method, concurrency): \"\"\"Compute checksums of", "identifiers that require documentation. __all__ = (\"compute_hashes\", \"hash_worker\") def compute_hashes(filename,", "with WorkerPool( concurrency=concurrency, generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size), worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename,", "six.moves import range # Modules included in our package. from", "in child processes.\"\"\" with open(filename, \"rb\") as handle: handle.seek(offset) context", "included in our package. from pdiffcopy.mp import WorkerPool # Public", "pool: yield offset, digest def hash_worker(offset, block_size, filename, method): \"\"\"Worker", "def compute_hashes(filename, block_size, method, concurrency): \"\"\"Compute checksums of a file", "(parallel).\"\"\" with WorkerPool( concurrency=concurrency, generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size), worker_fn=functools.partial(hash_worker, block_size=block_size,", "require documentation. 
__all__ = (\"compute_hashes\", \"hash_worker\") def compute_hashes(filename, block_size, method,", "URL: https://pdiffcopy.readthedocs.io \"\"\"Parallel hashing of files using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`.\"\"\"", "block_size=block_size, filename=filename, method=method), ) as pool: for offset, digest in", "inspired by rsync. # # Author: <NAME> <<EMAIL>> # Last", "# Last Change: March 6, 2020 # URL: https://pdiffcopy.readthedocs.io \"\"\"Parallel", "package. from pdiffcopy.mp import WorkerPool # Public identifiers that require", "functools import hashlib import os # External dependencies. from six.moves", "of a file in blocks (parallel).\"\"\" with WorkerPool( concurrency=concurrency, generator_fn=functools.partial(range,", "<<EMAIL>> # Last Change: March 6, 2020 # URL: https://pdiffcopy.readthedocs.io", "# External dependencies. from six.moves import range # Modules included", "Public identifiers that require documentation. __all__ = (\"compute_hashes\", \"hash_worker\") def", "import hashlib import os # External dependencies. from six.moves import", "using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`.\"\"\" # Standard library modules. import functools", "https://pdiffcopy.readthedocs.io \"\"\"Parallel hashing of files using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`.\"\"\" #", "method): \"\"\"Worker function to be run in child processes.\"\"\" with", "hashlib import os # External dependencies. from six.moves import range", "import os # External dependencies. from six.moves import range #", "pdiffcopy.mp import WorkerPool # Public identifiers that require documentation. __all__", "os # External dependencies. from six.moves import range # Modules", "as pool: for offset, digest in pool: yield offset, digest", "as handle: handle.seek(offset) context = hashlib.new(method) context.update(handle.read(block_size)) return offset, context.hexdigest()", "range # Modules included in our package. 
from pdiffcopy.mp import", "March 6, 2020 # URL: https://pdiffcopy.readthedocs.io \"\"\"Parallel hashing of files", "\"\"\"Parallel hashing of files using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`.\"\"\" # Standard", "of files using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`.\"\"\" # Standard library modules.", "in our package. from pdiffcopy.mp import WorkerPool # Public identifiers", "run in child processes.\"\"\" with open(filename, \"rb\") as handle: handle.seek(offset)", "processes.\"\"\" with open(filename, \"rb\") as handle: handle.seek(offset) context = hashlib.new(method)", "Standard library modules. import functools import hashlib import os #", "# # Author: <NAME> <<EMAIL>> # Last Change: March 6,", "a file in blocks (parallel).\"\"\" with WorkerPool( concurrency=concurrency, generator_fn=functools.partial(range, 0,", "library modules. import functools import hashlib import os # External", "from six.moves import range # Modules included in our package.", "to be run in child processes.\"\"\" with open(filename, \"rb\") as", "generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size), worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method), ) as", "documentation. __all__ = (\"compute_hashes\", \"hash_worker\") def compute_hashes(filename, block_size, method, concurrency):", "External dependencies. from six.moves import range # Modules included in", "compute_hashes(filename, block_size, method, concurrency): \"\"\"Compute checksums of a file in", "\"\"\"Worker function to be run in child processes.\"\"\" with open(filename,", "Last Change: March 6, 2020 # URL: https://pdiffcopy.readthedocs.io \"\"\"Parallel hashing", "import range # Modules included in our package. 
from pdiffcopy.mp", "file in blocks (parallel).\"\"\" with WorkerPool( concurrency=concurrency, generator_fn=functools.partial(range, 0, os.path.getsize(filename),", "offset, digest def hash_worker(offset, block_size, filename, method): \"\"\"Worker function to", "\"hash_worker\") def compute_hashes(filename, block_size, method, concurrency): \"\"\"Compute checksums of a", "# Fast large file synchronization inspired by rsync. # #", "concurrency=concurrency, generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size), worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method), )", "\"\"\"Compute checksums of a file in blocks (parallel).\"\"\" with WorkerPool(", "modules. import functools import hashlib import os # External dependencies.", "files using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`.\"\"\" # Standard library modules. import", "that require documentation. __all__ = (\"compute_hashes\", \"hash_worker\") def compute_hashes(filename, block_size,", "rsync. # # Author: <NAME> <<EMAIL>> # Last Change: March", "0, os.path.getsize(filename), block_size), worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method), ) as pool:", "digest def hash_worker(offset, block_size, filename, method): \"\"\"Worker function to be", "import WorkerPool # Public identifiers that require documentation. __all__ =", "large file synchronization inspired by rsync. # # Author: <NAME>", "# URL: https://pdiffcopy.readthedocs.io \"\"\"Parallel hashing of files using :mod:`multiprocessing` and", "\"rb\") as handle: handle.seek(offset) context = hashlib.new(method) context.update(handle.read(block_size)) return offset,", ":mod:`pdiffcopy.mp`.\"\"\" # Standard library modules. import functools import hashlib import", "file synchronization inspired by rsync. 
# # Author: <NAME> <<EMAIL>>", "6, 2020 # URL: https://pdiffcopy.readthedocs.io \"\"\"Parallel hashing of files using", "Modules included in our package. from pdiffcopy.mp import WorkerPool #", "function to be run in child processes.\"\"\" with open(filename, \"rb\")", "in blocks (parallel).\"\"\" with WorkerPool( concurrency=concurrency, generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size),", "method=method), ) as pool: for offset, digest in pool: yield", "filename=filename, method=method), ) as pool: for offset, digest in pool:", "method, concurrency): \"\"\"Compute checksums of a file in blocks (parallel).\"\"\"", "WorkerPool( concurrency=concurrency, generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size), worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method),", "in pool: yield offset, digest def hash_worker(offset, block_size, filename, method):", "hashing of files using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`.\"\"\" # Standard library", "import functools import hashlib import os # External dependencies. from" ]
[ "NumberOfSpecies 1 %block AtomicCoordinatesAndAtomicSpecies 0.77573521 0.00000000 0.00000000 1 -0.77573521 0.00000000", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "1 %block AtomicCoordinatesAndAtomicSpecies 0.77573521 0.00000000 0.00000000 1 -0.77573521 0.00000000 0.00000000", ".true. \"\"\" label = 'siesta' fi = open(label+'.fdf', 'w') print(siesta_fdf,", "except: print('get_pseudo( '+sp+' ) is not working--> skip siesta run'", "label = 'siesta' fi = open(label+'.fdf', 'w') print(siesta_fdf, file=fi) fi.close()", "Developers. All Rights Reserved. # # Licensed under the Apache", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "All Rights Reserved. # # Licensed under the Apache License,", "2.0 (the \"License\"); # you may not use this file", "file except in compliance with the License. # You may", "agreed to in writing, software # distributed under the License", "Unless required by applicable law or agreed to in writing,", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "\"\"\" xml.write .true. PAO.EnergyShift 100 meV %block ChemicalSpeciesLabel 1 11", "skip siesta run' ) return os.symlink(pppath, sp+'.psf') errorcode = subprocess.call(get_siesta_command(label),", "Copyright 2014-2018 The PySCF Developers. All Rights Reserved. # #", "mf sv = mf(label=label) self.assertEqual(sv.norbs, 10) self.assertTrue( sv.diag_check() ) self.assertTrue(", "distributed under the License is distributed on an \"AS IS\"", "import unittest from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo class KnowValues(unittest.TestCase): def", "siesta_fdf = \"\"\" xml.write .true. 
PAO.EnergyShift 100 meV %block ChemicalSpeciesLabel", "self.assertEqual(sv.norbs, 10) self.assertTrue( sv.diag_check() ) self.assertTrue( sv.overlap_check()) if __name__ ==", "= 'siesta' fi = open(label+'.fdf', 'w') print(siesta_fdf, file=fi) fi.close() for", "AtomicCoordinatesAndAtomicSpecies 0.77573521 0.00000000 0.00000000 1 -0.77573521 0.00000000 0.00000000 1 %endblock", "the specific language governing permissions and # limitations under the", "subprocess import os siesta_fdf = \"\"\" xml.write .true. PAO.EnergyShift 100", "%endblock AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps 0 COOP.Write .true. WriteDenchar .true. \"\"\" label", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "for sp in ['Na']: try: os.remove(sp+'.psf') except : pass try:", "os.symlink(pppath, sp+'.psf') errorcode = subprocess.call(get_siesta_command(label), shell=True) if errorcode: raise RuntimeError('siesta", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "except in compliance with the License. # You may obtain", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "siesta run' ) return os.symlink(pppath, sp+'.psf') errorcode = subprocess.call(get_siesta_command(label), shell=True)", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "KnowValues(unittest.TestCase): def test_siesta2sv_df(self): import subprocess import os siesta_fdf = \"\"\"", "is not working--> skip siesta run' ) return os.symlink(pppath, sp+'.psf')", "writing, software # distributed under the License is distributed on", "0 COOP.Write .true. WriteDenchar .true. \"\"\" label = 'siesta' fi", "in writing, software # distributed under the License is distributed", "1 %endblock AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps 0 COOP.Write .true. WriteDenchar .true. 
\"\"\"", "you may not use this file except in compliance with", "%endblock ChemicalSpeciesLabel NumberOfAtoms 2 NumberOfSpecies 1 %block AtomicCoordinatesAndAtomicSpecies 0.77573521 0.00000000", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "sp in ['Na']: try: os.remove(sp+'.psf') except : pass try: pppath", "language governing permissions and # limitations under the License. from", "fi = open(label+'.fdf', 'w') print(siesta_fdf, file=fi) fi.close() for sp in", "pppath = get_pseudo(sp) except: print('get_pseudo( '+sp+' ) is not working-->", "Na %endblock ChemicalSpeciesLabel NumberOfAtoms 2 NumberOfSpecies 1 %block AtomicCoordinatesAndAtomicSpecies 0.77573521", "test_siesta2sv_df(self): import subprocess import os siesta_fdf = \"\"\" xml.write .true.", "\"\"\" label = 'siesta' fi = open(label+'.fdf', 'w') print(siesta_fdf, file=fi)", "raise RuntimeError('siesta returned an error: {0}'.format(errorcode)) # run test system_vars", "10) self.assertTrue( sv.diag_check() ) self.assertTrue( sv.overlap_check()) if __name__ == \"__main__\":", "use this file except in compliance with the License. #", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "under the License. from __future__ import print_function import unittest from", "License. from __future__ import print_function import unittest from pyscf.nao.m_siesta_utils import", "%block AtomicCoordinatesAndAtomicSpecies 0.77573521 0.00000000 0.00000000 1 -0.77573521 0.00000000 0.00000000 1", "subprocess.call(get_siesta_command(label), shell=True) if errorcode: raise RuntimeError('siesta returned an error: {0}'.format(errorcode))", "sv = mf(label=label) self.assertEqual(sv.norbs, 10) self.assertTrue( sv.diag_check() ) self.assertTrue( sv.overlap_check())", "CONDITIONS OF ANY KIND, either express or implied. # See", "11 Na %endblock ChemicalSpeciesLabel NumberOfAtoms 2 NumberOfSpecies 1 %block AtomicCoordinatesAndAtomicSpecies", "WriteDenchar .true. 
\"\"\" label = 'siesta' fi = open(label+'.fdf', 'w')", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "# limitations under the License. from __future__ import print_function import", "in ['Na']: try: os.remove(sp+'.psf') except : pass try: pppath =", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "Rights Reserved. # # Licensed under the Apache License, Version", "governing permissions and # limitations under the License. from __future__", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "print_function import unittest from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo class KnowValues(unittest.TestCase):", "get_siesta_command, get_pseudo class KnowValues(unittest.TestCase): def test_siesta2sv_df(self): import subprocess import os", "print(siesta_fdf, file=fi) fi.close() for sp in ['Na']: try: os.remove(sp+'.psf') except", "# You may obtain a copy of the License at", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "pass try: pppath = get_pseudo(sp) except: print('get_pseudo( '+sp+' ) is", "import subprocess import os siesta_fdf = \"\"\" xml.write .true. PAO.EnergyShift", "# Copyright 2014-2018 The PySCF Developers. All Rights Reserved. #", ".true. WriteDenchar .true. 
\"\"\" label = 'siesta' fi = open(label+'.fdf',", "unittest from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo class KnowValues(unittest.TestCase): def test_siesta2sv_df(self):", "under the License is distributed on an \"AS IS\" BASIS,", "not working--> skip siesta run' ) return os.symlink(pppath, sp+'.psf') errorcode", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "License for the specific language governing permissions and # limitations", ") return os.symlink(pppath, sp+'.psf') errorcode = subprocess.call(get_siesta_command(label), shell=True) if errorcode:", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "<filename>pyscf/nao/test/test_0003_na2_nao.py # Copyright 2014-2018 The PySCF Developers. All Rights Reserved.", "pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo class KnowValues(unittest.TestCase): def test_siesta2sv_df(self): import subprocess", "= \"\"\" xml.write .true. PAO.EnergyShift 100 meV %block ChemicalSpeciesLabel 1", "import print_function import unittest from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo class", "Reserved. # # Licensed under the Apache License, Version 2.0", "PySCF Developers. All Rights Reserved. # # Licensed under the", "meV %block ChemicalSpeciesLabel 1 11 Na %endblock ChemicalSpeciesLabel NumberOfAtoms 2", ") is not working--> skip siesta run' ) return os.symlink(pppath,", "2 NumberOfSpecies 1 %block AtomicCoordinatesAndAtomicSpecies 0.77573521 0.00000000 0.00000000 1 -0.77573521", "open(label+'.fdf', 'w') print(siesta_fdf, file=fi) fi.close() for sp in ['Na']: try:", "os.remove(sp+'.psf') except : pass try: pppath = get_pseudo(sp) except: print('get_pseudo(", "2014-2018 The PySCF Developers. All Rights Reserved. 
# # Licensed", "pyscf.nao import mf sv = mf(label=label) self.assertEqual(sv.norbs, 10) self.assertTrue( sv.diag_check()", "the License for the specific language governing permissions and #", "(the \"License\"); # you may not use this file except", "Apache License, Version 2.0 (the \"License\"); # you may not", "# you may not use this file except in compliance", "either express or implied. # See the License for the", "system_vars from pyscf.nao import mf sv = mf(label=label) self.assertEqual(sv.norbs, 10)", "OR CONDITIONS OF ANY KIND, either express or implied. #", "PAO.EnergyShift 100 meV %block ChemicalSpeciesLabel 1 11 Na %endblock ChemicalSpeciesLabel", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "from pyscf.nao import mf sv = mf(label=label) self.assertEqual(sv.norbs, 10) self.assertTrue(", "the License is distributed on an \"AS IS\" BASIS, #", "errorcode = subprocess.call(get_siesta_command(label), shell=True) if errorcode: raise RuntimeError('siesta returned an", "error: {0}'.format(errorcode)) # run test system_vars from pyscf.nao import mf", "in compliance with the License. # You may obtain a", "permissions and # limitations under the License. from __future__ import", "from __future__ import print_function import unittest from pyscf.nao.m_siesta_utils import get_siesta_command,", "mf(label=label) self.assertEqual(sv.norbs, 10) self.assertTrue( sv.diag_check() ) self.assertTrue( sv.overlap_check()) if __name__", "errorcode: raise RuntimeError('siesta returned an error: {0}'.format(errorcode)) # run test", "software # distributed under the License is distributed on an", "ChemicalSpeciesLabel 1 11 Na %endblock ChemicalSpeciesLabel NumberOfAtoms 2 NumberOfSpecies 1", "the License. from __future__ import print_function import unittest from pyscf.nao.m_siesta_utils", "test system_vars from pyscf.nao import mf sv = mf(label=label) self.assertEqual(sv.norbs,", "import os siesta_fdf = \"\"\" xml.write .true. 
PAO.EnergyShift 100 meV", "working--> skip siesta run' ) return os.symlink(pppath, sp+'.psf') errorcode =", "# # Unless required by applicable law or agreed to", "limitations under the License. from __future__ import print_function import unittest", "# run test system_vars from pyscf.nao import mf sv =", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "shell=True) if errorcode: raise RuntimeError('siesta returned an error: {0}'.format(errorcode)) #", "RuntimeError('siesta returned an error: {0}'.format(errorcode)) # run test system_vars from", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps 0 COOP.Write .true. WriteDenchar .true. \"\"\" label =", "Version 2.0 (the \"License\"); # you may not use this", "0.77573521 0.00000000 0.00000000 1 -0.77573521 0.00000000 0.00000000 1 %endblock AtomicCoordinatesAndAtomicSpecies", "law or agreed to in writing, software # distributed under", "'+sp+' ) is not working--> skip siesta run' ) return", "an error: {0}'.format(errorcode)) # run test system_vars from pyscf.nao import", "= open(label+'.fdf', 'w') print(siesta_fdf, file=fi) fi.close() for sp in ['Na']:", "implied. # See the License for the specific language governing", "and # limitations under the License. from __future__ import print_function", "xml.write .true. 
PAO.EnergyShift 100 meV %block ChemicalSpeciesLabel 1 11 Na", "class KnowValues(unittest.TestCase): def test_siesta2sv_df(self): import subprocess import os siesta_fdf =", "under the Apache License, Version 2.0 (the \"License\"); # you", "from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo class KnowValues(unittest.TestCase): def test_siesta2sv_df(self): import", ": pass try: pppath = get_pseudo(sp) except: print('get_pseudo( '+sp+' )", "\"License\"); # you may not use this file except in", "print('get_pseudo( '+sp+' ) is not working--> skip siesta run' )", "= get_pseudo(sp) except: print('get_pseudo( '+sp+' ) is not working--> skip", "NumberOfAtoms 2 NumberOfSpecies 1 %block AtomicCoordinatesAndAtomicSpecies 0.77573521 0.00000000 0.00000000 1", "file=fi) fi.close() for sp in ['Na']: try: os.remove(sp+'.psf') except :", "self.assertTrue( sv.diag_check() ) self.assertTrue( sv.overlap_check()) if __name__ == \"__main__\": unittest.main()", "fi.close() for sp in ['Na']: try: os.remove(sp+'.psf') except : pass", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "try: os.remove(sp+'.psf') except : pass try: pppath = get_pseudo(sp) except:", "0.00000000 1 %endblock AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps 0 COOP.Write .true. WriteDenchar .true.", "try: pppath = get_pseudo(sp) except: print('get_pseudo( '+sp+' ) is not", "run test system_vars from pyscf.nao import mf sv = mf(label=label)", "def test_siesta2sv_df(self): import subprocess import os siesta_fdf = \"\"\" xml.write", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. 
# See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "100 meV %block ChemicalSpeciesLabel 1 11 Na %endblock ChemicalSpeciesLabel NumberOfAtoms", "return os.symlink(pppath, sp+'.psf') errorcode = subprocess.call(get_siesta_command(label), shell=True) if errorcode: raise", "The PySCF Developers. All Rights Reserved. # # Licensed under", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "except : pass try: pppath = get_pseudo(sp) except: print('get_pseudo( '+sp+'", "sp+'.psf') errorcode = subprocess.call(get_siesta_command(label), shell=True) if errorcode: raise RuntimeError('siesta returned", "if errorcode: raise RuntimeError('siesta returned an error: {0}'.format(errorcode)) # run", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", ".true. PAO.EnergyShift 100 meV %block ChemicalSpeciesLabel 1 11 Na %endblock", "MD.NumCGsteps 0 COOP.Write .true. WriteDenchar .true. \"\"\" label = 'siesta'", "get_pseudo(sp) except: print('get_pseudo( '+sp+' ) is not working--> skip siesta", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "1 11 Na %endblock ChemicalSpeciesLabel NumberOfAtoms 2 NumberOfSpecies 1 %block", "to in writing, software # distributed under the License is", "%block ChemicalSpeciesLabel 1 11 Na %endblock ChemicalSpeciesLabel NumberOfAtoms 2 NumberOfSpecies", "returned an error: {0}'.format(errorcode)) # run test system_vars from pyscf.nao", "os siesta_fdf = \"\"\" xml.write .true. 
PAO.EnergyShift 100 meV %block", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "= mf(label=label) self.assertEqual(sv.norbs, 10) self.assertTrue( sv.diag_check() ) self.assertTrue( sv.overlap_check()) if", "{0}'.format(errorcode)) # run test system_vars from pyscf.nao import mf sv", "get_pseudo class KnowValues(unittest.TestCase): def test_siesta2sv_df(self): import subprocess import os siesta_fdf", "You may obtain a copy of the License at #", "'w') print(siesta_fdf, file=fi) fi.close() for sp in ['Na']: try: os.remove(sp+'.psf')", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "COOP.Write .true. WriteDenchar .true. \"\"\" label = 'siesta' fi =", "import mf sv = mf(label=label) self.assertEqual(sv.norbs, 10) self.assertTrue( sv.diag_check() )", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "'siesta' fi = open(label+'.fdf', 'w') print(siesta_fdf, file=fi) fi.close() for sp", "run' ) return os.symlink(pppath, sp+'.psf') errorcode = subprocess.call(get_siesta_command(label), shell=True) if", "import get_siesta_command, get_pseudo class KnowValues(unittest.TestCase): def test_siesta2sv_df(self): import subprocess import", "with the License. # You may obtain a copy of", "['Na']: try: os.remove(sp+'.psf') except : pass try: pppath = get_pseudo(sp)", "this file except in compliance with the License. 
# You", "__future__ import print_function import unittest from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo", "the Apache License, Version 2.0 (the \"License\"); # you may", "1 -0.77573521 0.00000000 0.00000000 1 %endblock AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps 0 COOP.Write", "= subprocess.call(get_siesta_command(label), shell=True) if errorcode: raise RuntimeError('siesta returned an error:", "0.00000000 1 -0.77573521 0.00000000 0.00000000 1 %endblock AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps 0", "ChemicalSpeciesLabel NumberOfAtoms 2 NumberOfSpecies 1 %block AtomicCoordinatesAndAtomicSpecies 0.77573521 0.00000000 0.00000000", "0.00000000 0.00000000 1 -0.77573521 0.00000000 0.00000000 1 %endblock AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps", "0.00000000 0.00000000 1 %endblock AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps 0 COOP.Write .true. WriteDenchar", "-0.77573521 0.00000000 0.00000000 1 %endblock AtomicCoordinatesAndAtomicSpecies MD.NumCGsteps 0 COOP.Write .true." ]
[ "import pytest from moz_library.rental_books import RentalBooks class TestRentalBooks: @pytest.fixture() def", "moz_library.rental_books import RentalBooks class TestRentalBooks: @pytest.fixture() def books1(self): return RentalBooks()", "books1._can_extend_period(\"延長できません\") is False def test_can_extend_period_2(self, books1): assert books1._can_extend_period(\"すでに延長されています\") is False", "RentalBooks class TestRentalBooks: @pytest.fixture() def books1(self): return RentalBooks() def test_can_extend_period_1(self,", "test_can_extend_period_2(self, books1): assert books1._can_extend_period(\"すでに延長されています\") is False def test_can_extend_period_3(self, books1): assert", "class TestRentalBooks: @pytest.fixture() def books1(self): return RentalBooks() def test_can_extend_period_1(self, books1):", "@pytest.fixture() def books1(self): return RentalBooks() def test_can_extend_period_1(self, books1): assert books1._can_extend_period(\"延長できません\")", "books1._can_extend_period(\"すでに延長されています\") is False def test_can_extend_period_3(self, books1): assert books1._can_extend_period(\"それ以外\") is True", "import RentalBooks class TestRentalBooks: @pytest.fixture() def books1(self): return RentalBooks() def", "books1(self): return RentalBooks() def test_can_extend_period_1(self, books1): assert books1._can_extend_period(\"延長できません\") is False", "books1): assert books1._can_extend_period(\"延長できません\") is False def test_can_extend_period_2(self, books1): assert books1._can_extend_period(\"すでに延長されています\")", "def books1(self): return RentalBooks() def test_can_extend_period_1(self, books1): assert books1._can_extend_period(\"延長できません\") is", "books1): assert books1._can_extend_period(\"すでに延長されています\") is False def test_can_extend_period_3(self, books1): assert books1._can_extend_period(\"それ以外\")", "from moz_library.rental_books import RentalBooks class TestRentalBooks: @pytest.fixture() def books1(self): return", "TestRentalBooks: @pytest.fixture() def books1(self): return RentalBooks() def 
test_can_extend_period_1(self, books1): assert", "assert books1._can_extend_period(\"延長できません\") is False def test_can_extend_period_2(self, books1): assert books1._can_extend_period(\"すでに延長されています\") is", "test_can_extend_period_1(self, books1): assert books1._can_extend_period(\"延長できません\") is False def test_can_extend_period_2(self, books1): assert", "assert books1._can_extend_period(\"すでに延長されています\") is False def test_can_extend_period_3(self, books1): assert books1._can_extend_period(\"それ以外\") is", "pytest from moz_library.rental_books import RentalBooks class TestRentalBooks: @pytest.fixture() def books1(self):", "def test_can_extend_period_1(self, books1): assert books1._can_extend_period(\"延長できません\") is False def test_can_extend_period_2(self, books1):", "RentalBooks() def test_can_extend_period_1(self, books1): assert books1._can_extend_period(\"延長できません\") is False def test_can_extend_period_2(self,", "def test_can_extend_period_2(self, books1): assert books1._can_extend_period(\"すでに延長されています\") is False def test_can_extend_period_3(self, books1):", "return RentalBooks() def test_can_extend_period_1(self, books1): assert books1._can_extend_period(\"延長できません\") is False def", "is False def test_can_extend_period_2(self, books1): assert books1._can_extend_period(\"すでに延長されています\") is False def", "False def test_can_extend_period_2(self, books1): assert books1._can_extend_period(\"すでに延長されています\") is False def test_can_extend_period_3(self," ]
[ "\"D3\", 20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\", 30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\", 40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\",", "slides.Presentation() as pres: slide = pres.slides[0] ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100,", "\"E\")) category.grouping_levels.set_grouping_item(1, \"Group3\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c7\", \"F\")) category =", "ch.chart_data.chart_data_workbook fact.clear(0) defaultWorksheetIndex = 0 category = ch.chart_data.categories.add(fact.get_cell(0, \"c2\", \"A\"))", "slides def charts_multi_category_chart(): #ExStart:MultiCategoryChart # The path to the documents", "as pres: slide = pres.slides[0] ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100,", "100, 100, 600, 450) ch.chart_data.series.clear() ch.chart_data.categories.clear() fact = ch.chart_data.chart_data_workbook fact.clear(0)", "category.grouping_levels.set_grouping_item(1, \"Group2\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c5\", \"D\")) category = ch.chart_data.categories.add(fact.get_cell(0,", "import aspose.pydrawing as drawing import aspose.slides as slides def charts_multi_category_chart():", "category = ch.chart_data.categories.add(fact.get_cell(0, \"c9\", \"H\")) # Adding Series series =", "def charts_multi_category_chart(): #ExStart:MultiCategoryChart # The path to the documents directory.", "\"D6\", 50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\", 60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\", 70)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D9\",", "= ch.chart_data.categories.add(fact.get_cell(0, \"c2\", \"A\")) 
category.grouping_levels.set_grouping_item(1, \"Group1\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c3\",", "\"Group3\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c7\", \"F\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c8\",", "series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\", 50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\", 60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\", 70)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex,", "as slides def charts_multi_category_chart(): #ExStart:MultiCategoryChart # The path to the", "= ch.chart_data.series.add(fact.get_cell(0, \"D1\", \"Series 1\"), slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\", 10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex,", "to the documents directory. 
outDir = \"./examples/out/\" with slides.Presentation() as", "ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600, 450) ch.chart_data.series.clear() ch.chart_data.categories.clear() fact", "outDir = \"./examples/out/\" with slides.Presentation() as pres: slide = pres.slides[0]", "aspose.pydrawing as drawing import aspose.slides as slides def charts_multi_category_chart(): #ExStart:MultiCategoryChart", "= ch.chart_data.categories.add(fact.get_cell(0, \"c4\", \"C\")) category.grouping_levels.set_grouping_item(1, \"Group2\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c5\",", "category = ch.chart_data.categories.add(fact.get_cell(0, \"c6\", \"E\")) category.grouping_levels.set_grouping_item(1, \"Group3\") category = ch.chart_data.categories.add(fact.get_cell(0,", "\"c3\", \"B\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c4\", \"C\")) category.grouping_levels.set_grouping_item(1, \"Group2\") category", "= ch.chart_data.categories.add(fact.get_cell(0, \"c7\", \"F\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c8\", \"G\")) category.grouping_levels.set_grouping_item(1,", "# Save presentation with chart pres.save(outDir + \"charts_multi_category_chart_out.pptx\", slides.export.SaveFormat.PPTX) #ExEnd:MultiCategoryChart", "series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\", 10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\", 20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\", 30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex,", "\"B\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c4\", \"C\")) category.grouping_levels.set_grouping_item(1, \"Group2\") category =", "fact.clear(0) defaultWorksheetIndex = 0 category = ch.chart_data.categories.add(fact.get_cell(0, \"c2\", \"A\")) 
category.grouping_levels.set_grouping_item(1,", "series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D9\", 80)) # Save presentation with chart pres.save(outDir +", "= ch.chart_data.chart_data_workbook fact.clear(0) defaultWorksheetIndex = 0 category = ch.chart_data.categories.add(fact.get_cell(0, \"c2\",", "\"D5\", 40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\", 50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\", 60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\",", "pres: slide = pres.slides[0] ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600,", "<gh_stars>0 import aspose.pydrawing as drawing import aspose.slides as slides def", "directory. outDir = \"./examples/out/\" with slides.Presentation() as pres: slide =", "series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\", 20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\", 30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\", 40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex,", "slide = pres.slides[0] ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600, 450)", "slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\", 10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\", 20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\", 30))", "60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\", 70)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, 
\"D9\", 80)) # Save presentation", "# Adding Series series = ch.chart_data.series.add(fact.get_cell(0, \"D1\", \"Series 1\"), slides.charts.ChartType.CLUSTERED_COLUMN)", "80)) # Save presentation with chart pres.save(outDir + \"charts_multi_category_chart_out.pptx\", slides.export.SaveFormat.PPTX)", "\"D\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c6\", \"E\")) category.grouping_levels.set_grouping_item(1, \"Group3\") category =", "with slides.Presentation() as pres: slide = pres.slides[0] ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN,", "450) ch.chart_data.series.clear() ch.chart_data.categories.clear() fact = ch.chart_data.chart_data_workbook fact.clear(0) defaultWorksheetIndex = 0", "70)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D9\", 80)) # Save presentation with chart pres.save(outDir", "\"c5\", \"D\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c6\", \"E\")) category.grouping_levels.set_grouping_item(1, \"Group3\") category", "category.grouping_levels.set_grouping_item(1, \"Group4\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c9\", \"H\")) # Adding Series", "series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\", 60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\", 70)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D9\", 80)) #", "category = ch.chart_data.categories.add(fact.get_cell(0, \"c5\", \"D\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c6\", \"E\"))", "# The path to the documents directory. 
outDir = \"./examples/out/\"", "category = ch.chart_data.categories.add(fact.get_cell(0, \"c7\", \"F\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c8\", \"G\"))", "\"G\")) category.grouping_levels.set_grouping_item(1, \"Group4\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c9\", \"H\")) # Adding", "= ch.chart_data.categories.add(fact.get_cell(0, \"c9\", \"H\")) # Adding Series series = ch.chart_data.series.add(fact.get_cell(0,", "600, 450) ch.chart_data.series.clear() ch.chart_data.categories.clear() fact = ch.chart_data.chart_data_workbook fact.clear(0) defaultWorksheetIndex =", "ch.chart_data.categories.add(fact.get_cell(0, \"c9\", \"H\")) # Adding Series series = ch.chart_data.series.add(fact.get_cell(0, \"D1\",", "= pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600, 450) ch.chart_data.series.clear() ch.chart_data.categories.clear() fact =", "= 0 category = ch.chart_data.categories.add(fact.get_cell(0, \"c2\", \"A\")) category.grouping_levels.set_grouping_item(1, \"Group1\") category", "\"H\")) # Adding Series series = ch.chart_data.series.add(fact.get_cell(0, \"D1\", \"Series 1\"),", "\"Group1\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c3\", \"B\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c4\",", "as drawing import aspose.slides as slides def charts_multi_category_chart(): #ExStart:MultiCategoryChart #", "\"Group2\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c5\", \"D\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c6\",", "\"c8\", \"G\")) category.grouping_levels.set_grouping_item(1, \"Group4\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c9\", \"H\")) #", "category.grouping_levels.set_grouping_item(1, \"Group1\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c3\", \"B\")) category = ch.chart_data.categories.add(fact.get_cell(0,", "\"c7\", \"F\")) category = 
ch.chart_data.categories.add(fact.get_cell(0, \"c8\", \"G\")) category.grouping_levels.set_grouping_item(1, \"Group4\") category", "0 category = ch.chart_data.categories.add(fact.get_cell(0, \"c2\", \"A\")) category.grouping_levels.set_grouping_item(1, \"Group1\") category =", "= \"./examples/out/\" with slides.Presentation() as pres: slide = pres.slides[0] ch", "\"D7\", 60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\", 70)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D9\", 80)) # Save", "category = ch.chart_data.categories.add(fact.get_cell(0, \"c4\", \"C\")) category.grouping_levels.set_grouping_item(1, \"Group2\") category = ch.chart_data.categories.add(fact.get_cell(0,", "\"c9\", \"H\")) # Adding Series series = ch.chart_data.series.add(fact.get_cell(0, \"D1\", \"Series", "ch.chart_data.categories.add(fact.get_cell(0, \"c4\", \"C\")) category.grouping_levels.set_grouping_item(1, \"Group2\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c5\", \"D\"))", "pres.slides[0] ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600, 450) ch.chart_data.series.clear() ch.chart_data.categories.clear()", "1\"), slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\", 10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\", 20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\",", "path to the documents directory. 
outDir = \"./examples/out/\" with slides.Presentation()", "\"c4\", \"C\")) category.grouping_levels.set_grouping_item(1, \"Group2\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c5\", \"D\")) category", "series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\", 70)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D9\", 80)) # Save presentation with", "= ch.chart_data.categories.add(fact.get_cell(0, \"c8\", \"G\")) category.grouping_levels.set_grouping_item(1, \"Group4\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c9\",", "series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\", 30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\", 40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\", 50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex,", "= ch.chart_data.categories.add(fact.get_cell(0, \"c5\", \"D\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c6\", \"E\")) category.grouping_levels.set_grouping_item(1,", "\"./examples/out/\" with slides.Presentation() as pres: slide = pres.slides[0] ch =", "category.grouping_levels.set_grouping_item(1, \"Group3\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c7\", \"F\")) category = ch.chart_data.categories.add(fact.get_cell(0,", "50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\", 60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\", 70)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D9\", 80))", "ch.chart_data.categories.add(fact.get_cell(0, \"c5\", \"D\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c6\", \"E\")) category.grouping_levels.set_grouping_item(1, \"Group3\")", "= 
ch.chart_data.categories.add(fact.get_cell(0, \"c6\", \"E\")) category.grouping_levels.set_grouping_item(1, \"Group3\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c7\",", "\"D9\", 80)) # Save presentation with chart pres.save(outDir + \"charts_multi_category_chart_out.pptx\",", "pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600, 450) ch.chart_data.series.clear() ch.chart_data.categories.clear() fact = ch.chart_data.chart_data_workbook", "\"A\")) category.grouping_levels.set_grouping_item(1, \"Group1\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c3\", \"B\")) category =", "defaultWorksheetIndex = 0 category = ch.chart_data.categories.add(fact.get_cell(0, \"c2\", \"A\")) category.grouping_levels.set_grouping_item(1, \"Group1\")", "category = ch.chart_data.categories.add(fact.get_cell(0, \"c3\", \"B\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c4\", \"C\"))", "\"C\")) category.grouping_levels.set_grouping_item(1, \"Group2\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c5\", \"D\")) category =", "Adding Series series = ch.chart_data.series.add(fact.get_cell(0, \"D1\", \"Series 1\"), slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex,", "#ExStart:MultiCategoryChart # The path to the documents directory. 
outDir =", "30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\", 40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\", 50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\", 60))", "ch.chart_data.categories.add(fact.get_cell(0, \"c8\", \"G\")) category.grouping_levels.set_grouping_item(1, \"Group4\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c9\", \"H\"))", "category = ch.chart_data.categories.add(fact.get_cell(0, \"c8\", \"G\")) category.grouping_levels.set_grouping_item(1, \"Group4\") category = ch.chart_data.categories.add(fact.get_cell(0,", "documents directory. outDir = \"./examples/out/\" with slides.Presentation() as pres: slide", "ch.chart_data.categories.add(fact.get_cell(0, \"c6\", \"E\")) category.grouping_levels.set_grouping_item(1, \"Group3\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c7\", \"F\"))", "drawing import aspose.slides as slides def charts_multi_category_chart(): #ExStart:MultiCategoryChart # The", "\"D4\", 30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\", 40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\", 50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\",", "Series series = ch.chart_data.series.add(fact.get_cell(0, \"D1\", \"Series 1\"), slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\",", "10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\", 20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\", 30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\", 40))", "\"D8\", 70)) 
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D9\", 80)) # Save presentation with chart", "= ch.chart_data.categories.add(fact.get_cell(0, \"c3\", \"B\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c4\", \"C\")) category.grouping_levels.set_grouping_item(1,", "fact = ch.chart_data.chart_data_workbook fact.clear(0) defaultWorksheetIndex = 0 category = ch.chart_data.categories.add(fact.get_cell(0,", "ch.chart_data.series.clear() ch.chart_data.categories.clear() fact = ch.chart_data.chart_data_workbook fact.clear(0) defaultWorksheetIndex = 0 category", "40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\", 50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\", 60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D8\", 70))", "\"Group4\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c9\", \"H\")) # Adding Series series", "ch.chart_data.categories.add(fact.get_cell(0, \"c2\", \"A\")) category.grouping_levels.set_grouping_item(1, \"Group1\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c3\", \"B\"))", "\"D2\", 10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\", 20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\", 30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\",", "series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\", 40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\", 50)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D7\", 60)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex,", "\"c6\", \"E\")) category.grouping_levels.set_grouping_item(1, \"Group3\") 
category = ch.chart_data.categories.add(fact.get_cell(0, \"c7\", \"F\")) category", "aspose.slides as slides def charts_multi_category_chart(): #ExStart:MultiCategoryChart # The path to", "charts_multi_category_chart(): #ExStart:MultiCategoryChart # The path to the documents directory. outDir", "= pres.slides[0] ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600, 450) ch.chart_data.series.clear()", "\"D1\", \"Series 1\"), slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\", 10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\", 20))", "ch.chart_data.categories.add(fact.get_cell(0, \"c7\", \"F\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c8\", \"G\")) category.grouping_levels.set_grouping_item(1, \"Group4\")", "\"F\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c8\", \"G\")) category.grouping_levels.set_grouping_item(1, \"Group4\") category =", "The path to the documents directory. outDir = \"./examples/out/\" with", "category = ch.chart_data.categories.add(fact.get_cell(0, \"c2\", \"A\")) category.grouping_levels.set_grouping_item(1, \"Group1\") category = ch.chart_data.categories.add(fact.get_cell(0,", "the documents directory. 
outDir = \"./examples/out/\" with slides.Presentation() as pres:", "\"c2\", \"A\")) category.grouping_levels.set_grouping_item(1, \"Group1\") category = ch.chart_data.categories.add(fact.get_cell(0, \"c3\", \"B\")) category", "ch.chart_data.categories.add(fact.get_cell(0, \"c3\", \"B\")) category = ch.chart_data.categories.add(fact.get_cell(0, \"c4\", \"C\")) category.grouping_levels.set_grouping_item(1, \"Group2\")", "ch.chart_data.series.add(fact.get_cell(0, \"D1\", \"Series 1\"), slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\", 10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\",", "ch.chart_data.categories.clear() fact = ch.chart_data.chart_data_workbook fact.clear(0) defaultWorksheetIndex = 0 category =", "\"Series 1\"), slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\", 10)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D3\", 20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex,", "import aspose.slides as slides def charts_multi_category_chart(): #ExStart:MultiCategoryChart # The path", "series = ch.chart_data.series.add(fact.get_cell(0, \"D1\", \"Series 1\"), slides.charts.ChartType.CLUSTERED_COLUMN) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D2\", 10))", "20)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D4\", 30)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D5\", 40)) series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, \"D6\", 50))", "100, 600, 450) ch.chart_data.series.clear() ch.chart_data.categories.clear() fact = ch.chart_data.chart_data_workbook fact.clear(0) defaultWorksheetIndex" ]
[ "import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup from extras.choices import", "'enabled')), ('Events', ('type_create', 'type_update', 'type_delete')), ) content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(),", "limit_choices_to=FeatureQuery('export_templates'), required=False ) mime_type = forms.CharField( required=False, label=_('MIME type') )", "= ( (None, ('q',)), ('Attributes', ('type', 'content_types', 'weight', 'required')), )", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) weight = forms.IntegerField( required=False ) class", "= ( (None, ('q',)), ('Attributes', ('content_type', 'mime_type', 'file_extension', 'as_attachment')), )", ") ) kind = forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices), required=False, widget=StaticSelect() ) tag", "( (None, ('q',)), ('Time', ('time_before', 'time_after')), ('Attributes', ('action', 'user_id', 'changed_object_type_id')),", "limit_choices_to=FeatureQuery('custom_links'), required=False ) enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", "( (None, ('q', 'tag')), ('Creation', ('created_before', 'created_after', 'created_by_id')), ('Attributes', ('assigned_object_type_id',", "= forms.IntegerField( required=False ) class ExportTemplateFilterForm(FilterForm): fieldsets = ( (None,", "'platform_id', 'role_id')), ('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')), ('Tenant', ('tenant_group_id', 'tenant_id')) )", "required=False ) enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", ") tenant_group_id = DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(), required=False, label=_('Tenant groups') ) tenant_id", "= TagFilterField(model) class ObjectChangeFilterForm(FilterForm): model = ObjectChange fieldsets = (", "required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_delete = 
forms.NullBooleanField( required=False, widget=StaticSelect(", "method') ) enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", "'file_extension', 'as_attachment')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'), required=False )", "* from extras.utils import FeatureQuery from netbox.forms.base import NetBoxModelFilterSetForm from", "class CustomLinkFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_type', 'enabled',", "DynamicModelMultipleChoiceField( queryset=DeviceRole.objects.all(), required=False, label=_('Roles') ) platform_id = DynamicModelMultipleChoiceField( queryset=Platform.objects.all(), required=False,", "required=False, label=_('HTTP method') ) enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", "'kind')) ) created_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) created_before", "class WebhookFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_types', 'http_method',", "site_id = DynamicModelMultipleChoiceField( queryset=Site.objects.all(), required=False, label=_('Sites') ) device_type_id = DynamicModelMultipleChoiceField(", "('q',)), ('Attributes', ('content_type', 'enabled', 'new_window', 'weight')), ) content_type = ContentTypeChoiceField(", "= DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/', ) )", "= forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_update = forms.NullBooleanField(", "DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(), required=False, label=_('Cluster types'), fetch_trigger='open' ) cluster_group_id = DynamicModelMultipleChoiceField(", "from 
django.utils.translation import gettext as _ from dcim.models import DeviceRole,", ") created_by_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', )", "(None, ('q',)), ('Attributes', ('type', 'content_types', 'weight', 'required')), ) content_types =", "ClusterGroup, ClusterType __all__ = ( 'ConfigContextFilterForm', 'CustomFieldFilterForm', 'CustomLinkFilterForm', 'ExportTemplateFilterForm', 'JournalEntryFilterForm',", "(None, ('q',)), ('Attributes', ('content_types', 'http_method', 'enabled')), ('Events', ('type_create', 'type_update', 'type_delete')),", ") enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_create", "queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'), required=False ) enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", "django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.utils.translation import", "object type') ) class ConfigContextFilterForm(FilterForm): fieldsets = ( (None, ('q',", "local_context_data = forms.NullBooleanField( required=False, label=_('Has local config context data'), widget=StaticSelect(", "required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) weight = forms.IntegerField( required=False )", "label=_('HTTP method') ) enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", "'cluster_id')), ('Tenant', ('tenant_group_id', 'tenant_id')) ) region_id = DynamicModelMultipleChoiceField( queryset=Region.objects.all(), required=False,", "extras.models import * from extras.utils import FeatureQuery from netbox.forms.base import", "widget=DateTimePicker() ) created_by_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, 
label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/',", "FilterForm, MultipleChoiceField, StaticSelect, TagFilterField, ) from virtualization.models import Cluster, ClusterGroup,", "label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/', ) ) kind = forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices),", "from extras.choices import * from extras.models import * from extras.utils", "= ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'), required=False ) type = MultipleChoiceField( choices=CustomFieldTypeChoices,", "(None, ('q',)), ('Time', ('time_before', 'time_after')), ('Attributes', ('action', 'user_id', 'changed_object_type_id')), )", "queryset=Region.objects.all(), required=False, label=_('Regions') ) site_group_id = DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(), required=False, label=_('Site", "tenant_id = DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(), required=False, label=_('Tenant') ) tag_id = DynamicModelMultipleChoiceField(", ") cluster_id = DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(), required=False, label=_('Clusters') ) tenant_group_id =", "Region, Site, SiteGroup from extras.choices import * from extras.models import", "DynamicModelMultipleChoiceField( queryset=Tag.objects.all(), required=False, label=_('Tags') ) class LocalConfigContextFilterForm(forms.Form): local_context_data = forms.NullBooleanField(", "config context data'), widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class JournalEntryFilterForm(NetBoxModelFilterSetForm): model", "('tenant_group_id', 'tenant_id')) ) region_id = DynamicModelMultipleChoiceField( queryset=Region.objects.all(), required=False, label=_('Regions') )", "fieldsets = ( (None, ('q', 'tag')), ('Creation', ('created_before', 'created_after', 'created_by_id')),", "queryset=User.objects.all(), required=False, label=_('User'), 
widget=APISelectMultiple( api_url='/api/users/users/', ) ) assigned_object_type_id = DynamicModelMultipleChoiceField(", "forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) created_by_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False,", "django import forms from django.contrib.auth.models import User from django.contrib.contenttypes.models import", "time_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) action = forms.ChoiceField(", "required=False, widget=StaticSelect() ) user_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple(", "as _ from dcim.models import DeviceRole, DeviceType, Platform, Region, Site,", "groups') ) site_id = DynamicModelMultipleChoiceField( queryset=Site.objects.all(), required=False, label=_('Sites') ) device_type_id", "forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) time_before = forms.DateTimeField( required=False, label=_('Before'),", "MultipleChoiceField( choices=CustomFieldTypeChoices, required=False, label=_('Field type') ) weight = forms.IntegerField( required=False", "assigned_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/', )", ") cluster_group_id = DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(), required=False, label=_('Cluster groups') ) cluster_id", "utilities.forms import ( add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField,", ") content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'), required=False ) enabled =", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class 
WebhookFilterForm(FilterForm): fieldsets = ( (None,", "forms.IntegerField( required=False ) class ExportTemplateFilterForm(FilterForm): fieldsets = ( (None, ('q',)),", ") platform_id = DynamicModelMultipleChoiceField( queryset=Platform.objects.all(), required=False, label=_('Platforms') ) cluster_type_id =", "'TagFilterForm', 'WebhookFilterForm', ) class CustomFieldFilterForm(FilterForm): fieldsets = ( (None, ('q',)),", "= ( (None, ('q',)), ('Time', ('time_before', 'time_after')), ('Attributes', ('action', 'user_id',", ") action = forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices), required=False, widget=StaticSelect() ) user_id =", "= ( (None, ('q',)), ('Attributes', ('content_types', 'http_method', 'enabled')), ('Events', ('type_create',", "choices=WebhookHttpMethodChoices, required=False, label=_('HTTP method') ) enabled = forms.NullBooleanField( required=False, widget=StaticSelect(", "label=_('Field type') ) weight = forms.IntegerField( required=False ) required =", "enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) new_window =", ") ) type_create = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", "('q',)), ('Attributes', ('content_types', 'http_method', 'enabled')), ('Events', ('type_create', 'type_update', 'type_delete')), )", "required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) assigned_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(),", "widget=StaticSelect() ) tag = TagFilterField(model) class ObjectChangeFilterForm(FilterForm): model = ObjectChange", "changed_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/', )", "MultipleChoiceField, StaticSelect, TagFilterField, ) from virtualization.models import Cluster, 
ClusterGroup, ClusterType", "file_extension = forms.CharField( required=False ) as_attachment = forms.NullBooleanField( required=False, widget=StaticSelect(", "django.utils.translation import gettext as _ from dcim.models import DeviceRole, DeviceType,", ") ) weight = forms.IntegerField( required=False ) class ExportTemplateFilterForm(FilterForm): fieldsets", "= DynamicModelMultipleChoiceField( queryset=Tag.objects.all(), required=False, label=_('Tags') ) class LocalConfigContextFilterForm(forms.Form): local_context_data =", "= ( 'ConfigContextFilterForm', 'CustomFieldFilterForm', 'CustomLinkFilterForm', 'ExportTemplateFilterForm', 'JournalEntryFilterForm', 'LocalConfigContextFilterForm', 'ObjectChangeFilterForm', 'TagFilterForm',", "'weight', 'required')), ) content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'), required=False )", ") created_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) created_by_id =", "TagFilterForm(FilterForm): model = Tag content_type_id = ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False, label=_('Tagged", "import * from extras.models import * from extras.utils import FeatureQuery", "created_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) created_by_id = DynamicModelMultipleChoiceField(", "from django.contrib.contenttypes.models import ContentType from django.utils.translation import gettext as _", "'as_attachment')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'), required=False ) mime_type", "required=False, label=_('Tenant') ) tag_id = DynamicModelMultipleChoiceField( queryset=Tag.objects.all(), required=False, label=_('Tags') )", "__all__ = ( 'ConfigContextFilterForm', 'CustomFieldFilterForm', 'CustomLinkFilterForm', 
'ExportTemplateFilterForm', 'JournalEntryFilterForm', 'LocalConfigContextFilterForm', 'ObjectChangeFilterForm',", "label=_('Before'), widget=DateTimePicker() ) action = forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices), required=False, widget=StaticSelect() )", "( (None, ('q',)), ('Attributes', ('content_type', 'mime_type', 'file_extension', 'as_attachment')), ) content_type", "forms from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from", "'LocalConfigContextFilterForm', 'ObjectChangeFilterForm', 'TagFilterForm', 'WebhookFilterForm', ) class CustomFieldFilterForm(FilterForm): fieldsets = (", "required=False ) type = MultipleChoiceField( choices=CustomFieldTypeChoices, required=False, label=_('Field type') )", "DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(), required=False, label=_('Site groups') ) site_id = DynamicModelMultipleChoiceField( queryset=Site.objects.all(),", "required=False, label=_('Tenant groups') ) tenant_id = DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(), required=False, label=_('Tenant')", "'new_window', 'weight')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'), required=False )", "required = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class CustomLinkFilterForm(FilterForm):", ") region_id = DynamicModelMultipleChoiceField( queryset=Region.objects.all(), required=False, label=_('Regions') ) site_group_id =", ") kind = forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices), required=False, widget=StaticSelect() ) tag =", "LocalConfigContextFilterForm(forms.Form): local_context_data = forms.NullBooleanField( required=False, label=_('Has local config context data'),", "DynamicModelMultipleChoiceField( queryset=Site.objects.all(), required=False, label=_('Sites') ) device_type_id = 
DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(), required=False,", "= DynamicModelMultipleChoiceField( queryset=Platform.objects.all(), required=False, label=_('Platforms') ) cluster_type_id = DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(),", "import ( add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm,", "action = forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices), required=False, widget=StaticSelect() ) user_id = DynamicModelMultipleChoiceField(", "queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/', ) ) kind =", "type_update = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_delete =", "ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'), required=False ) type = MultipleChoiceField( choices=CustomFieldTypeChoices, required=False,", "= DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) changed_object_type_id", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) weight = forms.IntegerField( required=False ) class ExportTemplateFilterForm(FilterForm):", "label=_('Cluster groups') ) cluster_id = DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(), required=False, label=_('Clusters') )", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class CustomLinkFilterForm(FilterForm): fieldsets = ( (None, ('q',)),", "('q',)), ('Attributes', ('content_type', 'mime_type', 'file_extension', 'as_attachment')), ) content_type = ContentTypeChoiceField(", "= DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(), required=False, label=_('Clusters') ) tenant_group_id = 
DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(),", "widget=DateTimePicker() ) time_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) action", "required=False ) class ExportTemplateFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes',", "api_url='/api/extras/content-types/', ) ) kind = forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices), required=False, widget=StaticSelect() )", "('Attributes', ('action', 'user_id', 'changed_object_type_id')), ) time_after = forms.DateTimeField( required=False, label=_('After'),", "required=False, label=_('Clusters') ) tenant_group_id = DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(), required=False, label=_('Tenant groups')", "user_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) )", "'time_after')), ('Attributes', ('action', 'user_id', 'changed_object_type_id')), ) time_after = forms.DateTimeField( required=False,", "= ( (None, ('q',)), ('Attributes', ('content_type', 'enabled', 'new_window', 'weight')), )", "region_id = DynamicModelMultipleChoiceField( queryset=Region.objects.all(), required=False, label=_('Regions') ) site_group_id = DynamicModelMultipleChoiceField(", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_update = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", "virtualization.models import Cluster, ClusterGroup, ClusterType __all__ = ( 'ConfigContextFilterForm', 'CustomFieldFilterForm',", "groups') ) cluster_id = DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(), required=False, label=_('Clusters') ) tenant_group_id", ") mime_type = forms.CharField( required=False, label=_('MIME type') ) file_extension =", "('Attributes', ('type', 'content_types', 'weight', 'required')), ) content_types = ContentTypeMultipleChoiceField( 
queryset=ContentType.objects.all(),", "= Tag content_type_id = ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False, label=_('Tagged object type')", "required=False, label=_('Roles') ) platform_id = DynamicModelMultipleChoiceField( queryset=Platform.objects.all(), required=False, label=_('Platforms') )", "required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) changed_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(),", "= DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(), required=False, label=_('Site groups') ) site_id = DynamicModelMultipleChoiceField(", "('region_id', 'site_group_id', 'site_id')), ('Device', ('device_type_id', 'platform_id', 'role_id')), ('Cluster', ('cluster_type_id', 'cluster_group_id',", ") ) class JournalEntryFilterForm(NetBoxModelFilterSetForm): model = JournalEntry fieldsets = (", "= ( (None, ('q', 'tag_id')), ('Location', ('region_id', 'site_group_id', 'site_id')), ('Device',", "label=_('After'), widget=DateTimePicker() ) time_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() )", ") ) assigned_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple(", "forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_update = forms.NullBooleanField( required=False,", "limit_choices_to=FeatureQuery('webhooks'), required=False ) http_method = MultipleChoiceField( choices=WebhookHttpMethodChoices, required=False, label=_('HTTP method')", "APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField,", "= forms.DateTimeField( required=False, label=_('After'), 
widget=DateTimePicker() ) created_before = forms.DateTimeField( required=False,", "choices=add_blank_choice(ObjectChangeActionChoices), required=False, widget=StaticSelect() ) user_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'),", "queryset=DeviceRole.objects.all(), required=False, label=_('Roles') ) platform_id = DynamicModelMultipleChoiceField( queryset=Platform.objects.all(), required=False, label=_('Platforms')", "label=_('Cluster types'), fetch_trigger='open' ) cluster_group_id = DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(), required=False, label=_('Cluster", "= MultipleChoiceField( choices=CustomFieldTypeChoices, required=False, label=_('Field type') ) weight = forms.IntegerField(", ") ) type_delete = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", ") changed_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/',", "tag = TagFilterField(model) class ObjectChangeFilterForm(FilterForm): model = ObjectChange fieldsets =", "= ( (None, ('q', 'tag')), ('Creation', ('created_before', 'created_after', 'created_by_id')), ('Attributes',", "queryset=SiteGroup.objects.all(), required=False, label=_('Site groups') ) site_id = DynamicModelMultipleChoiceField( queryset=Site.objects.all(), required=False,", ") class CustomLinkFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_type',", "'CustomLinkFilterForm', 'ExportTemplateFilterForm', 'JournalEntryFilterForm', 'LocalConfigContextFilterForm', 'ObjectChangeFilterForm', 'TagFilterForm', 'WebhookFilterForm', ) class CustomFieldFilterForm(FilterForm):", "class JournalEntryFilterForm(NetBoxModelFilterSetForm): model = JournalEntry fieldsets = ( (None, ('q',", "from netbox.forms.base import NetBoxModelFilterSetForm from tenancy.models import 
Tenant, TenantGroup from", "type') ) weight = forms.IntegerField( required=False ) required = forms.NullBooleanField(", "'ExportTemplateFilterForm', 'JournalEntryFilterForm', 'LocalConfigContextFilterForm', 'ObjectChangeFilterForm', 'TagFilterForm', 'WebhookFilterForm', ) class CustomFieldFilterForm(FilterForm): fieldsets", "('content_types', 'http_method', 'enabled')), ('Events', ('type_create', 'type_update', 'type_delete')), ) content_types =", "choices=add_blank_choice(JournalEntryKindChoices), required=False, widget=StaticSelect() ) tag = TagFilterField(model) class ObjectChangeFilterForm(FilterForm): model", "= ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'), required=False ) mime_type = forms.CharField( required=False,", "DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(), required=False, label=_('Tenant') ) tag_id = DynamicModelMultipleChoiceField( queryset=Tag.objects.all(), required=False,", "DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(), required=False, label=_('Cluster groups') ) cluster_id = DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(),", "('device_type_id', 'platform_id', 'role_id')), ('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')), ('Tenant', ('tenant_group_id', 'tenant_id'))", "'created_by_id')), ('Attributes', ('assigned_object_type_id', 'kind')) ) created_after = forms.DateTimeField( required=False, label=_('After'),", "import forms from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType", ") tenant_id = DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(), required=False, label=_('Tenant') ) tag_id =", "= forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) time_before = forms.DateTimeField( required=False,", "DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(), required=False, label=_('Device types') ) role_id = 
DynamicModelMultipleChoiceField( queryset=DeviceRole.objects.all(),", ") as_attachment = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class", ") assigned_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/',", "('type', 'content_types', 'weight', 'required')), ) content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'),", "'user_id', 'changed_object_type_id')), ) time_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() )", "forms.CharField( required=False, label=_('MIME type') ) file_extension = forms.CharField( required=False )", "content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'), required=False ) enabled = forms.NullBooleanField(", "type = MultipleChoiceField( choices=CustomFieldTypeChoices, required=False, label=_('Field type') ) weight =", ") type = MultipleChoiceField( choices=CustomFieldTypeChoices, required=False, label=_('Field type') ) weight", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_update = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", "('cluster_type_id', 'cluster_group_id', 'cluster_id')), ('Tenant', ('tenant_group_id', 'tenant_id')) ) region_id = DynamicModelMultipleChoiceField(", "cluster_id = DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(), required=False, label=_('Clusters') ) tenant_group_id = DynamicModelMultipleChoiceField(", "'enabled', 'new_window', 'weight')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'), required=False", "api_url='/api/users/users/', ) ) changed_object_type_id = DynamicModelMultipleChoiceField( 
queryset=ContentType.objects.all(), required=False, label=_('Object Type'),", "tenant_group_id = DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(), required=False, label=_('Tenant groups') ) tenant_id =", "queryset=Tag.objects.all(), required=False, label=_('Tags') ) class LocalConfigContextFilterForm(forms.Form): local_context_data = forms.NullBooleanField( required=False,", "DeviceRole, DeviceType, Platform, Region, Site, SiteGroup from extras.choices import *", "queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False, label=_('Tagged object type') ) class ConfigContextFilterForm(FilterForm): fieldsets =", "= forms.IntegerField( required=False ) required = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", "* from extras.models import * from extras.utils import FeatureQuery from", "from utilities.forms import ( add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker,", "required=False, label=_('Device types') ) role_id = DynamicModelMultipleChoiceField( queryset=DeviceRole.objects.all(), required=False, label=_('Roles')", "required=False, label=_('After'), widget=DateTimePicker() ) time_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker()", "ClusterType __all__ = ( 'ConfigContextFilterForm', 'CustomFieldFilterForm', 'CustomLinkFilterForm', 'ExportTemplateFilterForm', 'JournalEntryFilterForm', 'LocalConfigContextFilterForm',", "('content_type', 'enabled', 'new_window', 'weight')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'),", "extras.choices import * from extras.models import * from extras.utils import", "type') ) class ConfigContextFilterForm(FilterForm): fieldsets = ( (None, ('q', 'tag_id')),", "label=_('Device types') ) role_id = DynamicModelMultipleChoiceField( 
queryset=DeviceRole.objects.all(), required=False, label=_('Roles') )", "DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField, ) from virtualization.models import Cluster,", "= DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(), required=False, label=_('Tenant groups') ) tenant_id = DynamicModelMultipleChoiceField(", ") required = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class", "DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/', ) ) kind", "model = Tag content_type_id = ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False, label=_('Tagged object", ") time_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) action =", "StaticSelect, TagFilterField, ) from virtualization.models import Cluster, ClusterGroup, ClusterType __all__", "device_type_id = DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(), required=False, label=_('Device types') ) role_id =", "model = ObjectChange fieldsets = ( (None, ('q',)), ('Time', ('time_before',", "import FeatureQuery from netbox.forms.base import NetBoxModelFilterSetForm from tenancy.models import Tenant,", "('created_before', 'created_after', 'created_by_id')), ('Attributes', ('assigned_object_type_id', 'kind')) ) created_after = forms.DateTimeField(", "'weight')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'), required=False ) enabled", "required=False, label=_('After'), widget=DateTimePicker() ) created_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker()", "'created_after', 'created_by_id')), ('Attributes', ('assigned_object_type_id', 'kind')) ) created_after = 
forms.DateTimeField( required=False,", "= DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(), required=False, label=_('Cluster types'), fetch_trigger='open' ) cluster_group_id =", "types'), fetch_trigger='open' ) cluster_group_id = DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(), required=False, label=_('Cluster groups')", "(None, ('q', 'tag_id')), ('Location', ('region_id', 'site_group_id', 'site_id')), ('Device', ('device_type_id', 'platform_id',", "gettext as _ from dcim.models import DeviceRole, DeviceType, Platform, Region,", "ExportTemplateFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_type', 'mime_type', 'file_extension',", ") content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'), required=False ) http_method =", "label=_('Before'), widget=DateTimePicker() ) created_by_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple(", "DeviceType, Platform, Region, Site, SiteGroup from extras.choices import * from", "required=False, label=_('MIME type') ) file_extension = forms.CharField( required=False ) as_attachment", "required=False ) http_method = MultipleChoiceField( choices=WebhookHttpMethodChoices, required=False, label=_('HTTP method') )", "= ObjectChange fieldsets = ( (None, ('q',)), ('Time', ('time_before', 'time_after')),", "('time_before', 'time_after')), ('Attributes', ('action', 'user_id', 'changed_object_type_id')), ) time_after = forms.DateTimeField(", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) new_window = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", ") site_id = DynamicModelMultipleChoiceField( queryset=Site.objects.all(), required=False, label=_('Sites') ) device_type_id =", "= ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'), 
required=False ) enabled = forms.NullBooleanField( required=False,", "ContentType from django.utils.translation import gettext as _ from dcim.models import", "platform_id = DynamicModelMultipleChoiceField( queryset=Platform.objects.all(), required=False, label=_('Platforms') ) cluster_type_id = DynamicModelMultipleChoiceField(", ") tag = TagFilterField(model) class ObjectChangeFilterForm(FilterForm): model = ObjectChange fieldsets", "required=False, label=_('Cluster types'), fetch_trigger='open' ) cluster_group_id = DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(), required=False,", "('Attributes', ('content_types', 'http_method', 'enabled')), ('Events', ('type_create', 'type_update', 'type_delete')), ) content_types", "('q', 'tag_id')), ('Location', ('region_id', 'site_group_id', 'site_id')), ('Device', ('device_type_id', 'platform_id', 'role_id')),", "required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) new_window = forms.NullBooleanField( required=False, widget=StaticSelect(", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class JournalEntryFilterForm(NetBoxModelFilterSetForm): model = JournalEntry fieldsets", "Site, SiteGroup from extras.choices import * from extras.models import *", "= forms.NullBooleanField( required=False, label=_('Has local config context data'), widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", "'role_id')), ('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')), ('Tenant', ('tenant_group_id', 'tenant_id')) ) region_id", "'site_id')), ('Device', ('device_type_id', 'platform_id', 'role_id')), ('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')), ('Tenant',", "= MultipleChoiceField( choices=WebhookHttpMethodChoices, required=False, label=_('HTTP method') ) enabled = forms.NullBooleanField(", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_create = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", 
"time_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) time_before = forms.DateTimeField(", "widget=APISelectMultiple( api_url='/api/users/users/', ) ) assigned_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object", "forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class TagFilterForm(FilterForm): model =", "model = JournalEntry fieldsets = ( (None, ('q', 'tag')), ('Creation',", "import Tenant, TenantGroup from utilities.forms import ( add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES,", "= forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices), required=False, widget=StaticSelect() ) tag = TagFilterField(model) class", "enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_create =", "fieldsets = ( (None, ('q',)), ('Time', ('time_before', 'time_after')), ('Attributes', ('action',", "forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_delete = forms.NullBooleanField( required=False,", "User from django.contrib.contenttypes.models import ContentType from django.utils.translation import gettext as", "fetch_trigger='open' ) cluster_group_id = DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(), required=False, label=_('Cluster groups') )", "queryset=Cluster.objects.all(), required=False, label=_('Clusters') ) tenant_group_id = DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(), required=False, label=_('Tenant", "queryset=DeviceType.objects.all(), required=False, label=_('Device types') ) role_id = DynamicModelMultipleChoiceField( queryset=DeviceRole.objects.all(), required=False,", "'http_method', 'enabled')), ('Events', ('type_create', 'type_update', 'type_delete')), ) content_types = ContentTypeMultipleChoiceField(", "from django import 
forms from django.contrib.auth.models import User from django.contrib.contenttypes.models", ") class TagFilterForm(FilterForm): model = Tag content_type_id = ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()),", "('Attributes', ('content_type', 'enabled', 'new_window', 'weight')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(),", "queryset=TenantGroup.objects.all(), required=False, label=_('Tenant groups') ) tenant_id = DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(), required=False,", "ObjectChangeFilterForm(FilterForm): model = ObjectChange fieldsets = ( (None, ('q',)), ('Time',", "= forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) new_window = forms.NullBooleanField(", "content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'), required=False ) http_method = MultipleChoiceField(", "content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'), required=False ) type = MultipleChoiceField(", "required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_update = forms.NullBooleanField( required=False, widget=StaticSelect(", "queryset=Platform.objects.all(), required=False, label=_('Platforms') ) cluster_type_id = DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(), required=False, label=_('Cluster", "content_type_id = ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False, label=_('Tagged object type') ) class", "'site_group_id', 'site_id')), ('Device', ('device_type_id', 'platform_id', 'role_id')), ('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')),", "Tenant, TenantGroup from utilities.forms import ( add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, 
ContentTypeChoiceField,", "from extras.models import * from extras.utils import FeatureQuery from netbox.forms.base", "api_url='/api/users/users/', ) ) assigned_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'),", ") content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'), required=False ) mime_type =", "required=False, label=_('Before'), widget=DateTimePicker() ) created_by_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'),", "label=_('Tagged object type') ) class ConfigContextFilterForm(FilterForm): fieldsets = ( (None,", "fieldsets = ( (None, ('q', 'tag_id')), ('Location', ('region_id', 'site_group_id', 'site_id')),", "required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class TagFilterForm(FilterForm): model = Tag", "= DynamicModelMultipleChoiceField( queryset=DeviceRole.objects.all(), required=False, label=_('Roles') ) platform_id = DynamicModelMultipleChoiceField( queryset=Platform.objects.all(),", "label=_('Sites') ) device_type_id = DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(), required=False, label=_('Device types') )", "= DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(), required=False, label=_('Cluster groups') ) cluster_id = DynamicModelMultipleChoiceField(", ") class ExportTemplateFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_type',", "('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')), ('Tenant', ('tenant_group_id', 'tenant_id')) ) region_id =", "from tenancy.models import Tenant, TenantGroup from utilities.forms import ( add_blank_choice,", "('q', 'tag')), ('Creation', ('created_before', 'created_after', 'created_by_id')), ('Attributes', ('assigned_object_type_id', 'kind')) )", "netbox.forms.base import NetBoxModelFilterSetForm from tenancy.models 
import Tenant, TenantGroup from utilities.forms", "queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'), required=False ) mime_type = forms.CharField( required=False, label=_('MIME type')", "required=False, label=_('Field type') ) weight = forms.IntegerField( required=False ) required", "(None, ('q',)), ('Attributes', ('content_type', 'enabled', 'new_window', 'weight')), ) content_type =", "TagFilterField, ) from virtualization.models import Cluster, ClusterGroup, ClusterType __all__ =", "queryset=ClusterGroup.objects.all(), required=False, label=_('Cluster groups') ) cluster_id = DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(), required=False,", "fieldsets = ( (None, ('q',)), ('Attributes', ('type', 'content_types', 'weight', 'required')),", "extras.utils import FeatureQuery from netbox.forms.base import NetBoxModelFilterSetForm from tenancy.models import", "'tenant_id')) ) region_id = DynamicModelMultipleChoiceField( queryset=Region.objects.all(), required=False, label=_('Regions') ) site_group_id", ") ) changed_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object Type'), widget=APISelectMultiple(", ") type_delete = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class", "NetBoxModelFilterSetForm from tenancy.models import Tenant, TenantGroup from utilities.forms import (", "required=False, label=_('Before'), widget=DateTimePicker() ) action = forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices), required=False, widget=StaticSelect()", "('Location', ('region_id', 'site_group_id', 'site_id')), ('Device', ('device_type_id', 'platform_id', 'role_id')), ('Cluster', ('cluster_type_id',", "required=False, label=_('Tagged object type') ) class ConfigContextFilterForm(FilterForm): fieldsets = (", "'type_delete')), ) content_types = ContentTypeMultipleChoiceField( 
queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'), required=False ) http_method", "ObjectChange fieldsets = ( (None, ('q',)), ('Time', ('time_before', 'time_after')), ('Attributes',", "widget=DateTimePicker() ) created_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) created_by_id", "required=False, widget=StaticSelect() ) tag = TagFilterField(model) class ObjectChangeFilterForm(FilterForm): model =", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class JournalEntryFilterForm(NetBoxModelFilterSetForm): model = JournalEntry fieldsets =", "('Time', ('time_before', 'time_after')), ('Attributes', ('action', 'user_id', 'changed_object_type_id')), ) time_after =", "= forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) created_by_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(),", ") user_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', )", "= forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class CustomLinkFilterForm(FilterForm): fieldsets", ") site_group_id = DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(), required=False, label=_('Site groups') ) site_id", "= JournalEntry fieldsets = ( (None, ('q', 'tag')), ('Creation', ('created_before',", "queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) changed_object_type_id = DynamicModelMultipleChoiceField(", "type_create = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_update =", "add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect,", 
"choices=CustomFieldTypeChoices, required=False, label=_('Field type') ) weight = forms.IntegerField( required=False )", "ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'), required=False ) http_method = MultipleChoiceField( choices=WebhookHttpMethodChoices, required=False,", "= forms.CharField( required=False ) as_attachment = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", ") class CustomFieldFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('type',", "= ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False, label=_('Tagged object type') ) class ConfigContextFilterForm(FilterForm):", ") from virtualization.models import Cluster, ClusterGroup, ClusterType __all__ = (", "= forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) action = forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices),", "import ContentType from django.utils.translation import gettext as _ from dcim.models", "required=False, label=_('Cluster groups') ) cluster_id = DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(), required=False, label=_('Clusters')", "cluster_type_id = DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(), required=False, label=_('Cluster types'), fetch_trigger='open' ) cluster_group_id", "_ from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup", "types') ) role_id = DynamicModelMultipleChoiceField( queryset=DeviceRole.objects.all(), required=False, label=_('Roles') ) platform_id", "WebhookFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_types', 'http_method', 'enabled')),", "forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_create = forms.NullBooleanField( required=False,", "required=False ) required = 
forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", "= DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(), required=False, label=_('Device types') ) role_id = DynamicModelMultipleChoiceField(", ") cluster_type_id = DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(), required=False, label=_('Cluster types'), fetch_trigger='open' )", "label=_('Platforms') ) cluster_type_id = DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(), required=False, label=_('Cluster types'), fetch_trigger='open'", "'tag')), ('Creation', ('created_before', 'created_after', 'created_by_id')), ('Attributes', ('assigned_object_type_id', 'kind')) ) created_after", "JournalEntryFilterForm(NetBoxModelFilterSetForm): model = JournalEntry fieldsets = ( (None, ('q', 'tag')),", "'changed_object_type_id')), ) time_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) time_before", "created_by_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) )", "class TagFilterForm(FilterForm): model = Tag content_type_id = ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False,", "weight = forms.IntegerField( required=False ) class ExportTemplateFilterForm(FilterForm): fieldsets = (", "('Events', ('type_create', 'type_update', 'type_delete')), ) content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'),", "class ObjectChangeFilterForm(FilterForm): model = ObjectChange fieldsets = ( (None, ('q',)),", "( 'ConfigContextFilterForm', 'CustomFieldFilterForm', 'CustomLinkFilterForm', 'ExportTemplateFilterForm', 'JournalEntryFilterForm', 'LocalConfigContextFilterForm', 'ObjectChangeFilterForm', 'TagFilterForm', 'WebhookFilterForm',", "'WebhookFilterForm', ) 
class CustomFieldFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes',", "required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class WebhookFilterForm(FilterForm): fieldsets = (", "'tag_id')), ('Location', ('region_id', 'site_group_id', 'site_id')), ('Device', ('device_type_id', 'platform_id', 'role_id')), ('Cluster',", "label=_('Regions') ) site_group_id = DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(), required=False, label=_('Site groups') )", "'type_update', 'type_delete')), ) content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'), required=False )", "label=_('Roles') ) platform_id = DynamicModelMultipleChoiceField( queryset=Platform.objects.all(), required=False, label=_('Platforms') ) cluster_type_id", ") role_id = DynamicModelMultipleChoiceField( queryset=DeviceRole.objects.all(), required=False, label=_('Roles') ) platform_id =", "import gettext as _ from dcim.models import DeviceRole, DeviceType, Platform,", "content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'), required=False ) mime_type = forms.CharField(", "label=_('Has local config context data'), widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class", ") type_create = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_update", "tenancy.models import Tenant, TenantGroup from utilities.forms import ( add_blank_choice, APISelectMultiple,", "JournalEntry fieldsets = ( (None, ('q', 'tag')), ('Creation', ('created_before', 'created_after',", "required=False, label=_('Object Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/', ) ) kind = forms.ChoiceField(", "= forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) weight = forms.IntegerField(", "= forms.NullBooleanField( 
required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class WebhookFilterForm(FilterForm): fieldsets", "= forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_create = forms.NullBooleanField(", "= forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class TagFilterForm(FilterForm): model", "kind = forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices), required=False, widget=StaticSelect() ) tag = TagFilterField(model)", "widget=StaticSelect() ) user_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/',", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class CustomLinkFilterForm(FilterForm): fieldsets = ( (None,", "'required')), ) content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'), required=False ) type", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class TagFilterForm(FilterForm): model = Tag content_type_id =", "TagFilterField(model) class ObjectChangeFilterForm(FilterForm): model = ObjectChange fieldsets = ( (None,", "limit_choices_to=FeatureQuery('custom_fields'), required=False ) type = MultipleChoiceField( choices=CustomFieldTypeChoices, required=False, label=_('Field type')", "('content_type', 'mime_type', 'file_extension', 'as_attachment')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'),", "forms.CharField( required=False ) as_attachment = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", "( (None, ('q',)), ('Attributes', ('type', 'content_types', 'weight', 'required')), ) content_types", "('action', 'user_id', 'changed_object_type_id')), ) time_after = forms.DateTimeField( required=False, label=_('After'), 
widget=DateTimePicker()", "label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) assigned_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False,", "required=False, label=_('Site groups') ) site_id = DynamicModelMultipleChoiceField( queryset=Site.objects.all(), required=False, label=_('Sites')", ") ) new_window = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", "from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup from", "'ObjectChangeFilterForm', 'TagFilterForm', 'WebhookFilterForm', ) class CustomFieldFilterForm(FilterForm): fieldsets = ( (None,", "required=False ) mime_type = forms.CharField( required=False, label=_('MIME type') ) file_extension", "DynamicModelMultipleChoiceField( queryset=Platform.objects.all(), required=False, label=_('Platforms') ) cluster_type_id = DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(), required=False,", "django.contrib.contenttypes.models import ContentType from django.utils.translation import gettext as _ from", "= DynamicModelMultipleChoiceField( queryset=Site.objects.all(), required=False, label=_('Sites') ) device_type_id = DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(),", "class LocalConfigContextFilterForm(forms.Form): local_context_data = forms.NullBooleanField( required=False, label=_('Has local config context", "= forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_delete = forms.NullBooleanField(", "cluster_group_id = DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(), required=False, label=_('Cluster groups') ) cluster_id =", "(None, ('q', 'tag')), ('Creation', ('created_before', 'created_after', 'created_by_id')), ('Attributes', ('assigned_object_type_id', 'kind'))", ") file_extension = forms.CharField( required=False ) as_attachment = forms.NullBooleanField( 
required=False,", "required=False, label=_('Sites') ) device_type_id = DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(), required=False, label=_('Device types')", "from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.utils.translation", ") class WebhookFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_types',", "( (None, ('q', 'tag_id')), ('Location', ('region_id', 'site_group_id', 'site_id')), ('Device', ('device_type_id',", "import NetBoxModelFilterSetForm from tenancy.models import Tenant, TenantGroup from utilities.forms import", "CustomLinkFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_type', 'enabled', 'new_window',", "DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) changed_object_type_id =", "( (None, ('q',)), ('Attributes', ('content_type', 'enabled', 'new_window', 'weight')), ) content_type", "label=_('Tags') ) class LocalConfigContextFilterForm(forms.Form): local_context_data = forms.NullBooleanField( required=False, label=_('Has local", "('Tenant', ('tenant_group_id', 'tenant_id')) ) region_id = DynamicModelMultipleChoiceField( queryset=Region.objects.all(), required=False, label=_('Regions')", "DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(), required=False, label=_('Tenant groups') ) tenant_id = DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(),", "groups') ) tenant_id = DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(), required=False, label=_('Tenant') ) tag_id", "queryset=ClusterType.objects.all(), required=False, label=_('Cluster types'), fetch_trigger='open' ) cluster_group_id = DynamicModelMultipleChoiceField( queryset=ClusterGroup.objects.all(),", "ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'), 
required=False ) mime_type = forms.CharField( required=False, label=_('MIME", "class CustomFieldFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('type', 'content_types',", "local config context data'), widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class JournalEntryFilterForm(NetBoxModelFilterSetForm):", "= forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices), required=False, widget=StaticSelect() ) user_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(),", ") ) class WebhookFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes',", "( add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField,", "= ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'), required=False ) http_method = MultipleChoiceField( choices=WebhookHttpMethodChoices,", "dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup from extras.choices", "context data'), widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class JournalEntryFilterForm(NetBoxModelFilterSetForm): model =", ") weight = forms.IntegerField( required=False ) required = forms.NullBooleanField( required=False,", "('Device', ('device_type_id', 'platform_id', 'role_id')), ('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')), ('Tenant', ('tenant_group_id',", "as_attachment = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class WebhookFilterForm(FilterForm):", ") enabled = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) new_window", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class TagFilterForm(FilterForm): model = Tag content_type_id", ") tag_id = DynamicModelMultipleChoiceField( 
queryset=Tag.objects.all(), required=False, label=_('Tags') ) class LocalConfigContextFilterForm(forms.Form):", "mime_type = forms.CharField( required=False, label=_('MIME type') ) file_extension = forms.CharField(", "data'), widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class JournalEntryFilterForm(NetBoxModelFilterSetForm): model = JournalEntry", "forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class CustomLinkFilterForm(FilterForm): fieldsets =", ") http_method = MultipleChoiceField( choices=WebhookHttpMethodChoices, required=False, label=_('HTTP method') ) enabled", "queryset=Tenant.objects.all(), required=False, label=_('Tenant') ) tag_id = DynamicModelMultipleChoiceField( queryset=Tag.objects.all(), required=False, label=_('Tags')", "('assigned_object_type_id', 'kind')) ) created_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() )", "class ExportTemplateFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('content_type', 'mime_type',", "'cluster_group_id', 'cluster_id')), ('Tenant', ('tenant_group_id', 'tenant_id')) ) region_id = DynamicModelMultipleChoiceField( queryset=Region.objects.all(),", "('type_create', 'type_update', 'type_delete')), ) content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'), required=False", ") new_window = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) weight", "new_window = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) weight =", "ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False, label=_('Tagged object type') ) class ConfigContextFilterForm(FilterForm): fieldsets", "fieldsets = ( (None, ('q',)), ('Attributes', ('content_type', 'mime_type', 'file_extension', 
'as_attachment')),", "forms.IntegerField( required=False ) required = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", "forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) weight = forms.IntegerField( required=False", "required=False, label=_('Regions') ) site_group_id = DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(), required=False, label=_('Site groups')", "<reponame>cybarox/netbox<gh_stars>0 from django import forms from django.contrib.auth.models import User from", "required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_create = forms.NullBooleanField( required=False, widget=StaticSelect(", "type_delete = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class TagFilterForm(FilterForm):", "class ConfigContextFilterForm(FilterForm): fieldsets = ( (None, ('q', 'tag_id')), ('Location', ('region_id',", "required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class CustomLinkFilterForm(FilterForm): fieldsets = (", "queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'), required=False ) type = MultipleChoiceField( choices=CustomFieldTypeChoices, required=False, label=_('Field", "label=_('After'), widget=DateTimePicker() ) created_before = forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() )", "DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField, ) from virtualization.models import", ") created_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) created_before =", "forms.NullBooleanField( required=False, label=_('Has local config context data'), widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", "forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) 
new_window = forms.NullBooleanField( required=False,", ") type_update = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_delete", "label=_('MIME type') ) file_extension = forms.CharField( required=False ) as_attachment =", "widget=DateTimePicker() ) action = forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices), required=False, widget=StaticSelect() ) user_id", ") ) class CustomLinkFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes',", "= DynamicModelMultipleChoiceField( queryset=Region.objects.all(), required=False, label=_('Regions') ) site_group_id = DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(),", "Tag content_type_id = ContentTypeMultipleChoiceField( queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()), required=False, label=_('Tagged object type') )", "forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) created_before = forms.DateTimeField( required=False, label=_('Before'),", "queryset=Site.objects.all(), required=False, label=_('Sites') ) device_type_id = DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(), required=False, label=_('Device", "( (None, ('q',)), ('Attributes', ('content_types', 'http_method', 'enabled')), ('Events', ('type_create', 'type_update',", ") class JournalEntryFilterForm(NetBoxModelFilterSetForm): model = JournalEntry fieldsets = ( (None,", ") content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'), required=False ) type =", ") time_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) time_before =", "SiteGroup from extras.choices import * from extras.models import * from", "widget=APISelectMultiple( api_url='/api/users/users/', ) ) changed_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False, label=_('Object", 
"label=_('Tenant groups') ) tenant_id = DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(), required=False, label=_('Tenant') )", "ConfigContextFilterForm(FilterForm): fieldsets = ( (None, ('q', 'tag_id')), ('Location', ('region_id', 'site_group_id',", "'content_types', 'weight', 'required')), ) content_types = ContentTypeMultipleChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_fields'), required=False", "DynamicModelMultipleChoiceField( queryset=Region.objects.all(), required=False, label=_('Regions') ) site_group_id = DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(), required=False,", "required=False, label=_('Platforms') ) cluster_type_id = DynamicModelMultipleChoiceField( queryset=ClusterType.objects.all(), required=False, label=_('Cluster types'),", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_delete = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES )", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_delete = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", "choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class WebhookFilterForm(FilterForm): fieldsets = ( (None, ('q',)),", "import User from django.contrib.contenttypes.models import ContentType from django.utils.translation import gettext", ") ) type_update = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", "forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) class WebhookFilterForm(FilterForm): fieldsets =", "MultipleChoiceField( choices=WebhookHttpMethodChoices, required=False, label=_('HTTP method') ) enabled = forms.NullBooleanField( required=False,", "('q',)), ('Attributes', ('type', 'content_types', 'weight', 'required')), ) content_types = ContentTypeMultipleChoiceField(", "(None, ('q',)), ('Attributes', ('content_type', 'mime_type', 
'file_extension', 'as_attachment')), ) content_type =", "forms.DateTimeField( required=False, label=_('Before'), widget=DateTimePicker() ) action = forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices), required=False,", ") class LocalConfigContextFilterForm(forms.Form): local_context_data = forms.NullBooleanField( required=False, label=_('Has local config", "tag_id = DynamicModelMultipleChoiceField( queryset=Tag.objects.all(), required=False, label=_('Tags') ) class LocalConfigContextFilterForm(forms.Form): local_context_data", "TenantGroup from utilities.forms import ( add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField,", ") ) class TagFilterForm(FilterForm): model = Tag content_type_id = ContentTypeMultipleChoiceField(", "http_method = MultipleChoiceField( choices=WebhookHttpMethodChoices, required=False, label=_('HTTP method') ) enabled =", "from extras.utils import FeatureQuery from netbox.forms.base import NetBoxModelFilterSetForm from tenancy.models", "ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField, ) from virtualization.models", "'JournalEntryFilterForm', 'LocalConfigContextFilterForm', 'ObjectChangeFilterForm', 'TagFilterForm', 'WebhookFilterForm', ) class CustomFieldFilterForm(FilterForm): fieldsets =", "import * from extras.utils import FeatureQuery from netbox.forms.base import NetBoxModelFilterSetForm", "forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices), required=False, widget=StaticSelect() ) tag = TagFilterField(model) class ObjectChangeFilterForm(FilterForm):", "required=False ) as_attachment = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", "site_group_id = DynamicModelMultipleChoiceField( queryset=SiteGroup.objects.all(), required=False, label=_('Site groups') ) site_id =", "('Creation', 
('created_before', 'created_after', 'created_by_id')), ('Attributes', ('assigned_object_type_id', 'kind')) ) created_after =", "= DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) assigned_object_type_id", "role_id = DynamicModelMultipleChoiceField( queryset=DeviceRole.objects.all(), required=False, label=_('Roles') ) platform_id = DynamicModelMultipleChoiceField(", "BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField, )", "ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('custom_links'), required=False ) enabled = forms.NullBooleanField( required=False, widget=StaticSelect(", "Cluster, ClusterGroup, ClusterType __all__ = ( 'ConfigContextFilterForm', 'CustomFieldFilterForm', 'CustomLinkFilterForm', 'ExportTemplateFilterForm',", "'mime_type', 'file_extension', 'as_attachment')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('export_templates'), required=False", "weight = forms.IntegerField( required=False ) required = forms.NullBooleanField( required=False, widget=StaticSelect(", "= DynamicModelMultipleChoiceField( queryset=Tenant.objects.all(), required=False, label=_('Tenant') ) tag_id = DynamicModelMultipleChoiceField( queryset=Tag.objects.all(),", "label=_('Tenant') ) tag_id = DynamicModelMultipleChoiceField( queryset=Tag.objects.all(), required=False, label=_('Tags') ) class", "required=False, label=_('Tags') ) class LocalConfigContextFilterForm(forms.Form): local_context_data = forms.NullBooleanField( required=False, label=_('Has", "queryset=ContentType.objects.all(), limit_choices_to=FeatureQuery('webhooks'), required=False ) http_method = MultipleChoiceField( choices=WebhookHttpMethodChoices, required=False, 
label=_('HTTP", ") weight = forms.IntegerField( required=False ) class ExportTemplateFilterForm(FilterForm): fieldsets =", "forms.ChoiceField( choices=add_blank_choice(ObjectChangeActionChoices), required=False, widget=StaticSelect() ) user_id = DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False,", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) type_create = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", "type') ) file_extension = forms.CharField( required=False ) as_attachment = forms.NullBooleanField(", "DynamicModelMultipleChoiceField( queryset=Cluster.objects.all(), required=False, label=_('Clusters') ) tenant_group_id = DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(), required=False,", "= forms.CharField( required=False, label=_('MIME type') ) file_extension = forms.CharField( required=False", "widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) ) new_window = forms.NullBooleanField( required=False, widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES", "label=_('Site groups') ) site_id = DynamicModelMultipleChoiceField( queryset=Site.objects.all(), required=False, label=_('Sites') )", "('q',)), ('Time', ('time_before', 'time_after')), ('Attributes', ('action', 'user_id', 'changed_object_type_id')), ) time_after", "from virtualization.models import Cluster, ClusterGroup, ClusterType __all__ = ( 'ConfigContextFilterForm',", ") device_type_id = DynamicModelMultipleChoiceField( queryset=DeviceType.objects.all(), required=False, label=_('Device types') ) role_id", "label=_('Clusters') ) tenant_group_id = DynamicModelMultipleChoiceField( queryset=TenantGroup.objects.all(), required=False, label=_('Tenant groups') )", "required=False, label=_('Has local config context data'), widget=StaticSelect( choices=BOOLEAN_WITH_BLANK_CHOICES ) )", "CustomFieldFilterForm(FilterForm): fieldsets = ( (None, ('q',)), ('Attributes', ('type', 'content_types', 
'weight',", "'ConfigContextFilterForm', 'CustomFieldFilterForm', 'CustomLinkFilterForm', 'ExportTemplateFilterForm', 'JournalEntryFilterForm', 'LocalConfigContextFilterForm', 'ObjectChangeFilterForm', 'TagFilterForm', 'WebhookFilterForm', )", "'CustomFieldFilterForm', 'CustomLinkFilterForm', 'ExportTemplateFilterForm', 'JournalEntryFilterForm', 'LocalConfigContextFilterForm', 'ObjectChangeFilterForm', 'TagFilterForm', 'WebhookFilterForm', ) class", "FeatureQuery from netbox.forms.base import NetBoxModelFilterSetForm from tenancy.models import Tenant, TenantGroup", "widget=APISelectMultiple( api_url='/api/extras/content-types/', ) ) kind = forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices), required=False, widget=StaticSelect()", "DynamicModelMultipleChoiceField( queryset=User.objects.all(), required=False, label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) assigned_object_type_id =", "fieldsets = ( (None, ('q',)), ('Attributes', ('content_type', 'enabled', 'new_window', 'weight')),", "fieldsets = ( (None, ('q',)), ('Attributes', ('content_types', 'http_method', 'enabled')), ('Events',", "('Attributes', ('assigned_object_type_id', 'kind')) ) created_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker()", ") class ConfigContextFilterForm(FilterForm): fieldsets = ( (None, ('q', 'tag_id')), ('Location',", "Type'), widget=APISelectMultiple( api_url='/api/extras/content-types/', ) ) kind = forms.ChoiceField( choices=add_blank_choice(JournalEntryKindChoices), required=False,", "Platform, Region, Site, SiteGroup from extras.choices import * from extras.models", "created_after = forms.DateTimeField( required=False, label=_('After'), widget=DateTimePicker() ) created_before = forms.DateTimeField(", "import Cluster, ClusterGroup, ClusterType __all__ = ( 'ConfigContextFilterForm', 'CustomFieldFilterForm', 'CustomLinkFilterForm',", "('Attributes', ('content_type', 'mime_type', 'file_extension', 
'as_attachment')), ) content_type = ContentTypeChoiceField( queryset=ContentType.objects.all(),", "label=_('User'), widget=APISelectMultiple( api_url='/api/users/users/', ) ) changed_object_type_id = DynamicModelMultipleChoiceField( queryset=ContentType.objects.all(), required=False,", "ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField, ) from" ]
[ "## License details see https://choosealicense.com/licenses/mit/ ##==== config section ======== #", "204 : sys.stderr.write(r.status_code) sys.stderr.write(r.content) # catch if input is no", "import time # load json from stdin try: myjson =", "#+ '\\n' # post data into time series database; http", "catch if input is no valid json except: sys.stderr.write('!!error: no", "series InfluxDB ## ## Writes data from smart meter to", "here - set db and token at least cly_base_url =", "key = config[obis] # set human readable field name value", "into time series database; http response should be 204 r", "\"token\", \"p\" : \"placeyourtokenhere\", \"precision\": \"ms\"} # assign readable field", "= requests.post(cly_base_url, params=cly_parameters, data=line) if r.status_code != 204 : sys.stderr.write(r.status_code)", "time series InfluxDB ## ## Writes data from smart meter", "at least cly_base_url = 'https://corlysis.com:8086/write' cly_parameters = { \"db\": \"energy\",", "2019 <NAME> ## License details see https://choosealicense.com/licenses/mit/ ##==== config section", "==== ##-- import libraries import json, sys, requests import requests", "myjson['data'][obis] # get value from smart meter line += key", "details see https://choosealicense.com/licenses/mit/ ##==== config section ======== # define corlysis", "params=cly_parameters, data=line) if r.status_code != 204 : sys.stderr.write(r.status_code) sys.stderr.write(r.content) #", "in myjson['data']: key = config[obis] # set human readable field", "cut off last comma line = line[:-1] # add timestamp", "\"p\" : \"placeyourtokenhere\", \"precision\": \"ms\"} # assign readable field names", "db and token at least cly_base_url = 'https://corlysis.com:8086/write' cly_parameters =", "section ==== no need to change lines below ==== ##--", "section ======== # define corlysis settings here - set db", "= line[:-1] # add timestamp as unix timestamp in ms", "- writes values to Corlysis time series InfluxDB ## ##", "need to configure 
your database and token ## in the", "## - writes values to Corlysis time series InfluxDB ##", "writes values to Corlysis time series InfluxDB ## ## Writes", "- set db and token at least cly_base_url = 'https://corlysis.com:8086/write'", "json from stdin try: myjson = json.load(sys.stdin) except: sys.stderr.write('!! error", "database; http response should be 204 r = requests.post(cly_base_url, params=cly_parameters,", "config section. ## ## [1] https://corlysis.com/ ##==== license section ========", "stdin try: myjson = json.load(sys.stdin) except: sys.stderr.write('!! error loading json')", "requests.post(cly_base_url, params=cly_parameters, data=line) if r.status_code != 204 : sys.stderr.write(r.status_code) sys.stderr.write(r.content)", "\"1.8.0\": \"Bezug\", \"2.8.0\": \"Einspeisung\", \"16.7.0\": \"Wirkleistung\" } ##==== code section", "and token at least cly_base_url = 'https://corlysis.com:8086/write' cly_parameters = {", "values to Corlysis time series InfluxDB ## ## Writes data", "= { \"1.8.0\": \"Bezug\", \"2.8.0\": \"Einspeisung\", \"16.7.0\": \"Wirkleistung\" } ##====", "define corlysis settings here - set db and token at", "## at Corlysis.com [1]. You need to configure your database", "# add key=value to insert line # cut off last", "sys.stderr.write(r.content) # catch if input is no valid json except:", "{ \"db\": \"energy\", \"u\" : \"token\", \"p\" : \"placeyourtokenhere\", \"precision\":", "no need to change lines below ==== ##-- import libraries", "value = myjson['data'][obis] # get value from smart meter line", "line # cut off last comma line = line[:-1] #", "the config section. ## ## [1] https://corlysis.com/ ##==== license section", "ms line += ' ' + str(int(time.time()*1000)) #+ '\\n' #", "= { \"db\": \"energy\", \"u\" : \"token\", \"p\" : \"placeyourtokenhere\",", "config section ======== # define corlysis settings here - set", "[1]. 
You need to configure your database and token ##", "\"Wirkleistung\" } ##==== code section ==== no need to change", "{ \"1.8.0\": \"Bezug\", \"2.8.0\": \"Einspeisung\", \"16.7.0\": \"Wirkleistung\" } ##==== code", "under MIT License: Copyright (C) 2019 <NAME> ## License details", "data from smart meter to time series database (InfluxDB) ##", "error loading json') exit(1) # decode json try: line =", "# set human readable field name value = myjson['data'][obis] #", "\"placeyourtokenhere\", \"precision\": \"ms\"} # assign readable field names config =", "## in the config section. ## ## [1] https://corlysis.com/ ##====", "add each meter value to line for obis in myjson['data']:", "json.load(sys.stdin) except: sys.stderr.write('!! error loading json') exit(1) # decode json", "from stdin ## - writes values to Corlysis time series", "token at least cly_base_url = 'https://corlysis.com:8086/write' cly_parameters = { \"db\":", "to insert line # cut off last comma line =", "'=' + str(value) + ',' # add key=value to insert", "## [1] https://corlysis.com/ ##==== license section ======== ## This code", "sys, requests import requests import time # load json from", "by sml_reader.py) from stdin ## - writes values to Corlysis", "## ## [1] https://corlysis.com/ ##==== license section ======== ## This", "add key=value to insert line # cut off last comma", "time # load json from stdin try: myjson = json.load(sys.stdin)", "stdin ## - writes values to Corlysis time series InfluxDB", "series database (InfluxDB) ## at Corlysis.com [1]. You need to", "section. 
## ## [1] https://corlysis.com/ ##==== license section ======== ##", "You need to configure your database and token ## in", "to line for obis in myjson['data']: key = config[obis] #", "to Corlysis time series InfluxDB ## ## Writes data from", ": \"placeyourtokenhere\", \"precision\": \"ms\"} # assign readable field names config", "# assign readable field names config = { \"1.8.0\": \"Bezug\",", "exit(1) # decode json try: line = \"meter_data \" #", "http response should be 204 r = requests.post(cly_base_url, params=cly_parameters, data=line)", "204 r = requests.post(cly_base_url, params=cly_parameters, data=line) if r.status_code != 204", "json, sys, requests import requests import time # load json", "str(int(time.time()*1000)) #+ '\\n' # post data into time series database;", "for obis in myjson['data']: key = config[obis] # set human", "Copyright (C) 2019 <NAME> ## License details see https://choosealicense.com/licenses/mit/ ##====", "==== no need to change lines below ==== ##-- import", "line = line[:-1] # add timestamp as unix timestamp in", "human readable field name value = myjson['data'][obis] # get value", "+= ' ' + str(int(time.time()*1000)) #+ '\\n' # post data", "is under MIT License: Copyright (C) 2019 <NAME> ## License", "json try: line = \"meter_data \" # add each meter", "r.status_code != 204 : sys.stderr.write(r.status_code) sys.stderr.write(r.content) # catch if input", "sys.stderr.write(r.status_code) sys.stderr.write(r.content) # catch if input is no valid json", "License details see https://choosealicense.com/licenses/mit/ ##==== config section ======== # define", "try: myjson = json.load(sys.stdin) except: sys.stderr.write('!! error loading json') exit(1)", "database and token ## in the config section. 
## ##", "Writes data from smart meter to time series database (InfluxDB)", "line for obis in myjson['data']: key = config[obis] # set", "requests import requests import time # load json from stdin", "response should be 204 r = requests.post(cly_base_url, params=cly_parameters, data=line) if", "try: line = \"meter_data \" # add each meter value", "change lines below ==== ##-- import libraries import json, sys,", "corlysis settings here - set db and token at least", "\"2.8.0\": \"Einspeisung\", \"16.7.0\": \"Wirkleistung\" } ##==== code section ==== no", "line[:-1] # add timestamp as unix timestamp in ms line", "if r.status_code != 204 : sys.stderr.write(r.status_code) sys.stderr.write(r.content) # catch if", "#!/usr/bin/python3 ## write2cly.py - reads json (generated by sml_reader.py) from", "<NAME> ## License details see https://choosealicense.com/licenses/mit/ ##==== config section ========", "is no valid json except: sys.stderr.write('!!error: no data block in", "section ======== ## This code is under MIT License: Copyright", "from stdin try: myjson = json.load(sys.stdin) except: sys.stderr.write('!! 
error loading", "myjson['data']: key = config[obis] # set human readable field name", "## write2cly.py - reads json (generated by sml_reader.py) from stdin", "cly_parameters = { \"db\": \"energy\", \"u\" : \"token\", \"p\" :", "below ==== ##-- import libraries import json, sys, requests import", "comma line = line[:-1] # add timestamp as unix timestamp", "get value from smart meter line += key + '='", "## Writes data from smart meter to time series database", "timestamp as unix timestamp in ms line += ' '", "requests import time # load json from stdin try: myjson", "\"u\" : \"token\", \"p\" : \"placeyourtokenhere\", \"precision\": \"ms\"} # assign", "} ##==== code section ==== no need to change lines", "meter value to line for obis in myjson['data']: key =", "= myjson['data'][obis] # get value from smart meter line +=", "settings here - set db and token at least cly_base_url", "post data into time series database; http response should be", "##==== code section ==== no need to change lines below", "config[obis] # set human readable field name value = myjson['data'][obis]", "license section ======== ## This code is under MIT License:", "libraries import json, sys, requests import requests import time #", "load json from stdin try: myjson = json.load(sys.stdin) except: sys.stderr.write('!!", "assign readable field names config = { \"1.8.0\": \"Bezug\", \"2.8.0\":", "\"db\": \"energy\", \"u\" : \"token\", \"p\" : \"placeyourtokenhere\", \"precision\": \"ms\"}", "key=value to insert line # cut off last comma line", "your database and token ## in the config section. 
##", "write2cly.py - reads json (generated by sml_reader.py) from stdin ##", "data into time series database; http response should be 204", "to configure your database and token ## in the config", "should be 204 r = requests.post(cly_base_url, params=cly_parameters, data=line) if r.status_code", "##==== config section ======== # define corlysis settings here -", "timestamp in ms line += ' ' + str(int(time.time()*1000)) #+", "(generated by sml_reader.py) from stdin ## - writes values to", "https://choosealicense.com/licenses/mit/ ##==== config section ======== # define corlysis settings here", "need to change lines below ==== ##-- import libraries import", "# post data into time series database; http response should", "add timestamp as unix timestamp in ms line += '", "= config[obis] # set human readable field name value =", ": \"token\", \"p\" : \"placeyourtokenhere\", \"precision\": \"ms\"} # assign readable", "if input is no valid json except: sys.stderr.write('!!error: no data", "series database; http response should be 204 r = requests.post(cly_base_url,", "field names config = { \"1.8.0\": \"Bezug\", \"2.8.0\": \"Einspeisung\", \"16.7.0\":", "======== # define corlysis settings here - set db and", "set human readable field name value = myjson['data'][obis] # get", "code section ==== no need to change lines below ====", "to time series database (InfluxDB) ## at Corlysis.com [1]. You", "as unix timestamp in ms line += ' ' +", "' ' + str(int(time.time()*1000)) #+ '\\n' # post data into", "Corlysis.com [1]. You need to configure your database and token", "json (generated by sml_reader.py) from stdin ## - writes values", "# define corlysis settings here - set db and token", "',' # add key=value to insert line # cut off", "(InfluxDB) ## at Corlysis.com [1]. 
You need to configure your", "# load json from stdin try: myjson = json.load(sys.stdin) except:", "str(value) + ',' # add key=value to insert line #", "line += key + '=' + str(value) + ',' #", "[1] https://corlysis.com/ ##==== license section ======== ## This code is", "to change lines below ==== ##-- import libraries import json,", "import libraries import json, sys, requests import requests import time", "and token ## in the config section. ## ## [1]", "import json, sys, requests import requests import time # load", "= 'https://corlysis.com:8086/write' cly_parameters = { \"db\": \"energy\", \"u\" : \"token\",", "json') exit(1) # decode json try: line = \"meter_data \"", "License: Copyright (C) 2019 <NAME> ## License details see https://choosealicense.com/licenses/mit/", "smart meter line += key + '=' + str(value) +", "\"Einspeisung\", \"16.7.0\": \"Wirkleistung\" } ##==== code section ==== no need", "last comma line = line[:-1] # add timestamp as unix", "be 204 r = requests.post(cly_base_url, params=cly_parameters, data=line) if r.status_code !=", "database (InfluxDB) ## at Corlysis.com [1]. You need to configure", "time series database; http response should be 204 r =", "data=line) if r.status_code != 204 : sys.stderr.write(r.status_code) sys.stderr.write(r.content) # catch", "field name value = myjson['data'][obis] # get value from smart", "line += ' ' + str(int(time.time()*1000)) #+ '\\n' # post", "at Corlysis.com [1]. You need to configure your database and", "in ms line += ' ' + str(int(time.time()*1000)) #+ '\\n'", "'\\n' # post data into time series database; http response", "obis in myjson['data']: key = config[obis] # set human readable", "configure your database and token ## in the config section.", "in the config section. 
## ## [1] https://corlysis.com/ ##==== license", "# catch if input is no valid json except: sys.stderr.write('!!error:", "from smart meter line += key + '=' + str(value)", "import requests import time # load json from stdin try:", "off last comma line = line[:-1] # add timestamp as", "input is no valid json except: sys.stderr.write('!!error: no data block", "reads json (generated by sml_reader.py) from stdin ## - writes", "##==== license section ======== ## This code is under MIT", "\"Bezug\", \"2.8.0\": \"Einspeisung\", \"16.7.0\": \"Wirkleistung\" } ##==== code section ====", "except: sys.stderr.write('!! error loading json') exit(1) # decode json try:", "InfluxDB ## ## Writes data from smart meter to time", "valid json except: sys.stderr.write('!!error: no data block in json') exit(2)", "names config = { \"1.8.0\": \"Bezug\", \"2.8.0\": \"Einspeisung\", \"16.7.0\": \"Wirkleistung\"", "line = \"meter_data \" # add each meter value to", "meter to time series database (InfluxDB) ## at Corlysis.com [1].", "loading json') exit(1) # decode json try: line = \"meter_data", "\"meter_data \" # add each meter value to line for", "see https://choosealicense.com/licenses/mit/ ##==== config section ======== # define corlysis settings", "= json.load(sys.stdin) except: sys.stderr.write('!! 
error loading json') exit(1) # decode", "config = { \"1.8.0\": \"Bezug\", \"2.8.0\": \"Einspeisung\", \"16.7.0\": \"Wirkleistung\" }", "+= key + '=' + str(value) + ',' # add", "https://corlysis.com/ ##==== license section ======== ## This code is under", "## This code is under MIT License: Copyright (C) 2019", "- reads json (generated by sml_reader.py) from stdin ## -", "##-- import libraries import json, sys, requests import requests import", "!= 204 : sys.stderr.write(r.status_code) sys.stderr.write(r.content) # catch if input is", "(C) 2019 <NAME> ## License details see https://choosealicense.com/licenses/mit/ ##==== config", "\" # add each meter value to line for obis", "no valid json except: sys.stderr.write('!!error: no data block in json')", "time series database (InfluxDB) ## at Corlysis.com [1]. You need", "Corlysis time series InfluxDB ## ## Writes data from smart", "each meter value to line for obis in myjson['data']: key", "token ## in the config section. ## ## [1] https://corlysis.com/", "+ ',' # add key=value to insert line # cut", "decode json try: line = \"meter_data \" # add each", "# add timestamp as unix timestamp in ms line +=", "unix timestamp in ms line += ' ' + str(int(time.time()*1000))", "+ str(int(time.time()*1000)) #+ '\\n' # post data into time series", "value to line for obis in myjson['data']: key = config[obis]", "= \"meter_data \" # add each meter value to line", "least cly_base_url = 'https://corlysis.com:8086/write' cly_parameters = { \"db\": \"energy\", \"u\"", "myjson = json.load(sys.stdin) except: sys.stderr.write('!! 
error loading json') exit(1) #", "set db and token at least cly_base_url = 'https://corlysis.com:8086/write' cly_parameters", "MIT License: Copyright (C) 2019 <NAME> ## License details see", "lines below ==== ##-- import libraries import json, sys, requests", "+ str(value) + ',' # add key=value to insert line", "' + str(int(time.time()*1000)) #+ '\\n' # post data into time", "value from smart meter line += key + '=' +", "r = requests.post(cly_base_url, params=cly_parameters, data=line) if r.status_code != 204 :", "# add each meter value to line for obis in", "readable field names config = { \"1.8.0\": \"Bezug\", \"2.8.0\": \"Einspeisung\",", "\"precision\": \"ms\"} # assign readable field names config = {", "cly_base_url = 'https://corlysis.com:8086/write' cly_parameters = { \"db\": \"energy\", \"u\" :", "## ## Writes data from smart meter to time series", "This code is under MIT License: Copyright (C) 2019 <NAME>", "# get value from smart meter line += key +", ": sys.stderr.write(r.status_code) sys.stderr.write(r.content) # catch if input is no valid", "from smart meter to time series database (InfluxDB) ## at", "\"16.7.0\": \"Wirkleistung\" } ##==== code section ==== no need to", "code is under MIT License: Copyright (C) 2019 <NAME> ##", "name value = myjson['data'][obis] # get value from smart meter", "meter line += key + '=' + str(value) + ','", "+ '=' + str(value) + ',' # add key=value to", "sys.stderr.write('!! 
error loading json') exit(1) # decode json try: line", "insert line # cut off last comma line = line[:-1]", "# cut off last comma line = line[:-1] # add", "smart meter to time series database (InfluxDB) ## at Corlysis.com", "sml_reader.py) from stdin ## - writes values to Corlysis time", "# decode json try: line = \"meter_data \" # add", "\"ms\"} # assign readable field names config = { \"1.8.0\":", "readable field name value = myjson['data'][obis] # get value from", "key + '=' + str(value) + ',' # add key=value", "======== ## This code is under MIT License: Copyright (C)", "'https://corlysis.com:8086/write' cly_parameters = { \"db\": \"energy\", \"u\" : \"token\", \"p\"", "\"energy\", \"u\" : \"token\", \"p\" : \"placeyourtokenhere\", \"precision\": \"ms\"} #" ]
[ "'NINFO', 'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'OPT', 'PTR', 'RP', 'RRSIG',", "ISC license # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. #", "classes.\"\"\" __all__ = [ 'AFSDB', 'AMTRELAY', 'AVC', 'CAA', 'CDNSKEY', 'CDS',", "distribute this software and its # documentation for any purpose", "CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. \"\"\"Class", "THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND", "WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL", "Inc. # # Permission to use, copy, modify, and distribute", "RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN", "WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER", "LOSS OF USE, DATA OR PROFITS, WHETHER IN AN #", "'MX', 'NINFO', 'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'OPT', 'PTR', 'RP',", "OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF", "'RRSIG', 'RT', 'SMIMEA', 'SOA', 'SPF', 'SSHFP', 'TKEY', 'TLSA', 'TSIG', 'TXT',", "that the above copyright notice and this permission notice #", "BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL", "OF THIS SOFTWARE. \"\"\"Class ANY (generic) rdata type classes.\"\"\" __all__", "SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT,", "this software and its # documentation for any purpose with", "THE USE OR PERFORMANCE OF THIS SOFTWARE. \"\"\"Class ANY (generic)", "# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES", "AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,", "SOFTWARE. 
\"\"\"Class ANY (generic) rdata type classes.\"\"\" __all__ = [", "[ 'AFSDB', 'AMTRELAY', 'AVC', 'CAA', 'CDNSKEY', 'CDS', 'CERT', 'CNAME', 'CSYNC',", "and distribute this software and its # documentation for any", "hereby granted, # provided that the above copyright notice and", "__all__ = [ 'AFSDB', 'AMTRELAY', 'AVC', 'CAA', 'CDNSKEY', 'CDS', 'CERT',", "its # documentation for any purpose with or without fee", "license # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. # #", "MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE", "2009-2011 Nominum, Inc. # # Permission to use, copy, modify,", "without fee is hereby granted, # provided that the above", "OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN", "# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. # # Permission", "PROVIDED \"AS IS\" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH", "INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING", "'DS', 'EUI48', 'EUI64', 'GPOS', 'HINFO', 'HIP', 'ISDN', 'LOC', 'MX', 'NINFO',", "DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING", "= [ 'AFSDB', 'AMTRELAY', 'AVC', 'CAA', 'CDNSKEY', 'CDS', 'CERT', 'CNAME',", "TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY", "(generic) rdata type classes.\"\"\" __all__ = [ 'AFSDB', 'AMTRELAY', 'AVC',", "'DNAME', 'DNSKEY', 'DS', 'EUI48', 'EUI64', 'GPOS', 'HINFO', 'HIP', 'ISDN', 'LOC',", "notice # appear in all copies. # # THE SOFTWARE", "with or without fee is hereby granted, # provided that", "DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF", "# # THE SOFTWARE IS PROVIDED \"AS IS\" AND NOMINUM", "for text of ISC license # Copyright (C) 2003-2007, 2009-2011", "PERFORMANCE OF THIS SOFTWARE. \"\"\"Class ANY (generic) rdata type classes.\"\"\"", "(C) 2003-2007, 2009-2011 Nominum, Inc. 
# # Permission to use,", "'AVC', 'CAA', 'CDNSKEY', 'CDS', 'CERT', 'CNAME', 'CSYNC', 'DLV', 'DNAME', 'DNSKEY',", "'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'OPT', 'PTR', 'RP', 'RRSIG', 'RT', 'SMIMEA', 'SOA',", "'AFSDB', 'AMTRELAY', 'AVC', 'CAA', 'CDNSKEY', 'CDS', 'CERT', 'CNAME', 'CSYNC', 'DLV',", "'HINFO', 'HIP', 'ISDN', 'LOC', 'MX', 'NINFO', 'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM',", "of ISC license # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.", "NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE", "OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS", "'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'OPT', 'PTR', 'RP', 'RRSIG', 'RT',", "documentation for any purpose with or without fee is hereby", "ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA", "(C) Dnspython Contributors, see LICENSE for text of ISC license", "Contributors, see LICENSE for text of ISC license # Copyright", "# # Permission to use, copy, modify, and distribute this", "# provided that the above copyright notice and this permission", "this permission notice # appear in all copies. # #", "copy, modify, and distribute this software and its # documentation", "software and its # documentation for any purpose with or", "OR PERFORMANCE OF THIS SOFTWARE. \"\"\"Class ANY (generic) rdata type", "OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE", "# appear in all copies. 
# # THE SOFTWARE IS", "notice and this permission notice # appear in all copies.", "ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL", "SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS.", "# Permission to use, copy, modify, and distribute this software", "OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION", "TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH", "EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT,", "for any purpose with or without fee is hereby granted,", "above copyright notice and this permission notice # appear in", "IS\" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO", "Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. # # Permission to", "# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE", "use, copy, modify, and distribute this software and its #", "'CDS', 'CERT', 'CNAME', 'CSYNC', 'DLV', 'DNAME', 'DNSKEY', 'DS', 'EUI48', 'EUI64',", "'CAA', 'CDNSKEY', 'CDS', 'CERT', 'CNAME', 'CSYNC', 'DLV', 'DNAME', 'DNSKEY', 'DS',", "\"\"\"Class ANY (generic) rdata type classes.\"\"\" __all__ = [ 'AFSDB',", "provided that the above copyright notice and this permission notice", "FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR #", "OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION", "'CERT', 'CNAME', 'CSYNC', 'DLV', 'DNAME', 'DNSKEY', 'DS', 'EUI48', 'EUI64', 'GPOS',", "the above copyright notice and this permission notice # appear", "<reponame>Ashiq5/dnspython # Copyright (C) Dnspython Contributors, see LICENSE for text", "SOFTWARE IS PROVIDED \"AS IS\" AND NOMINUM DISCLAIMS ALL WARRANTIES", "'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'OPT', 'PTR', 'RP', 'RRSIG', 'RT', 'SMIMEA',", "'NSEC3PARAM', 'OPENPGPKEY', 'OPT', 'PTR', 'RP', 'RRSIG', 'RT', 'SMIMEA', 'SOA', 'SPF',", "ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT", "'CSYNC', 'DLV', 'DNAME', 'DNSKEY', 'DS', 'EUI48', 'EUI64', 'GPOS', 'HINFO', 'HIP',", "'DNSKEY', 'DS', 'EUI48', 'EUI64', 'GPOS', 'HINFO', 'HIP', 'ISDN', 'LOC', 'MX',", "SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES #", "copyright notice and this permission notice # appear in all", "copies. # # THE SOFTWARE IS PROVIDED \"AS IS\" AND", "rdata type classes.\"\"\" __all__ = [ 'AFSDB', 'AMTRELAY', 'AVC', 'CAA',", "DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT,", "permission notice # appear in all copies. # # THE", "'CNAME', 'CSYNC', 'DLV', 'DNAME', 'DNSKEY', 'DS', 'EUI48', 'EUI64', 'GPOS', 'HINFO',", "NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL,", "'SMIMEA', 'SOA', 'SPF', 'SSHFP', 'TKEY', 'TLSA', 'TSIG', 'TXT', 'URI', 'X25',", "# Copyright (C) Dnspython Contributors, see LICENSE for text of", "# THE SOFTWARE IS PROVIDED \"AS IS\" AND NOMINUM DISCLAIMS", "Copyright (C) Dnspython Contributors, see LICENSE for text of ISC", "any purpose with or without fee is hereby granted, #", "NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR", "# documentation for any purpose with or without fee is", "and this permission notice # appear in all copies. #", "WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
\"\"\"Class ANY", "'DLV', 'DNAME', 'DNSKEY', 'DS', 'EUI48', 'EUI64', 'GPOS', 'HINFO', 'HIP', 'ISDN',", "'EUI64', 'GPOS', 'HINFO', 'HIP', 'ISDN', 'LOC', 'MX', 'NINFO', 'NS', 'NSEC',", "ARISING OUT # OF OR IN CONNECTION WITH THE USE", "THIS SOFTWARE. \"\"\"Class ANY (generic) rdata type classes.\"\"\" __all__ =", "type classes.\"\"\" __all__ = [ 'AFSDB', 'AMTRELAY', 'AVC', 'CAA', 'CDNSKEY',", "'LOC', 'MX', 'NINFO', 'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'OPT', 'PTR',", "IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS", "'PTR', 'RP', 'RRSIG', 'RT', 'SMIMEA', 'SOA', 'SPF', 'SSHFP', 'TKEY', 'TLSA',", "IS PROVIDED \"AS IS\" AND NOMINUM DISCLAIMS ALL WARRANTIES #", "2003-2007, 2009-2011 Nominum, Inc. # # Permission to use, copy,", "'CDNSKEY', 'CDS', 'CERT', 'CNAME', 'CSYNC', 'DLV', 'DNAME', 'DNSKEY', 'DS', 'EUI48',", "OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM", "PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR", "FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN", "# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE", "'ISDN', 'LOC', 'MX', 'NINFO', 'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'OPT',", "'AMTRELAY', 'AVC', 'CAA', 'CDNSKEY', 'CDS', 'CERT', 'CNAME', 'CSYNC', 'DLV', 'DNAME',", "'GPOS', 'HINFO', 'HIP', 'ISDN', 'LOC', 'MX', 'NINFO', 'NS', 'NSEC', 'NSEC3',", "FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR", "'RP', 'RRSIG', 'RT', 'SMIMEA', 'SOA', 'SPF', 'SSHFP', 'TKEY', 'TLSA', 'TSIG',", "purpose with or without fee is hereby granted, # provided", "'EUI48', 'EUI64', 'GPOS', 'HINFO', 'HIP', 'ISDN', 'LOC', 'MX', 'NINFO', 'NS',", "all copies. 
# # THE SOFTWARE IS PROVIDED \"AS IS\"", "ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE", "THE SOFTWARE IS PROVIDED \"AS IS\" AND NOMINUM DISCLAIMS ALL", "Dnspython Contributors, see LICENSE for text of ISC license #", "IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.", "see LICENSE for text of ISC license # Copyright (C)", "REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF #", "ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES", "LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES", "USE OR PERFORMANCE OF THIS SOFTWARE. \"\"\"Class ANY (generic) rdata", "and its # documentation for any purpose with or without", "fee is hereby granted, # provided that the above copyright", "modify, and distribute this software and its # documentation for", "NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR", "# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY", "CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF", "'SOA', 'SPF', 'SSHFP', 'TKEY', 'TLSA', 'TSIG', 'TXT', 'URI', 'X25', ]", "'OPENPGPKEY', 'OPT', 'PTR', 'RP', 'RRSIG', 'RT', 'SMIMEA', 'SOA', 'SPF', 'SSHFP',", "in all copies. # # THE SOFTWARE IS PROVIDED \"AS", "USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF", "to use, copy, modify, and distribute this software and its", "granted, # provided that the above copyright notice and this", "AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS", "OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM", "or without fee is hereby granted, # provided that the", "IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. 
IN NO EVENT", "Permission to use, copy, modify, and distribute this software and", "IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY", "'HIP', 'ISDN', 'LOC', 'MX', 'NINFO', 'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY',", "WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF", "text of ISC license # Copyright (C) 2003-2007, 2009-2011 Nominum,", "'RT', 'SMIMEA', 'SOA', 'SPF', 'SSHFP', 'TKEY', 'TLSA', 'TSIG', 'TXT', 'URI',", "LICENSE for text of ISC license # Copyright (C) 2003-2007,", "# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING", "CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS", "DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER", "DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR", "Nominum, Inc. # # Permission to use, copy, modify, and", "WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER", "AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR", "OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT #", "ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO", "\"AS IS\" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD", "INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN", "OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE,", "# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,", "ANY (generic) rdata type classes.\"\"\" __all__ = [ 'AFSDB', 'AMTRELAY',", "appear in all copies. # # THE SOFTWARE IS PROVIDED", "WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED", "is hereby granted, # provided that the above copyright notice", "OUT # OF OR IN CONNECTION WITH THE USE OR", "'OPT', 'PTR', 'RP', 'RRSIG', 'RT', 'SMIMEA', 'SOA', 'SPF', 'SSHFP', 'TKEY'," ]
[ "import itertools import sys ######################################################################## ######################################################################## # import additional python-library", "dev_data or eval_data id_name : str id of wav file", "= test_file_list_generator(target_dir, id_str) # setup anomaly score file path anomaly_score_csv", "============\") if mode: # calculate averages for AUCs and pAUCs", "for AUCs and pAUCs averaged_performance = numpy.mean(numpy.array(performance, dtype=float), axis=0) csv_lines.append([\"Average\"]", "# main 01_test.py ######################################################################## if __name__ == \"__main__\": # check", "AUCs and pAUCs averaged_performance = numpy.mean(numpy.array(performance, dtype=float), axis=0) csv_lines.append([\"Average\"] +", "directory name prefix_anomaly : str (default=\"anomaly\") anomaly directory name ext", "All right reserved. \"\"\" ######################################################################## # import default python-library ########################################################################", "num : {num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\") #", "directory for idx, target_dir in enumerate(dirs): print(\"\\n===========================\") print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir, idx=idx+1,", "os.path.split(target_dir)[1] print(\"============== MODEL LOAD ==============\") # set model path model_file", "mode is \"evaluation\": test_files : list [ str ] file", "auc = metrics.roc_auc_score(y_true, y_pred) p_auc = metrics.roc_auc_score(y_true, y_pred, max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\",", "com import keras_model ######################################################################## ######################################################################## # load parameter.yaml 
######################################################################## param", "not os.path.exists(model_file): com.logger.error(\"{} model not found \".format(machine_type)) sys.exit(-1) model =", "in test_files] for file_idx, file_path in tqdm(enumerate(test_files), total=len(test_files)): try: data", "= get_machine_id_list_for_test(target_dir) print(machine_id_list) for id_str in machine_id_list: # load test", "sys ######################################################################## ######################################################################## # import additional python-library ######################################################################## import numpy", "open(save_file_path, \"w\", newline=\"\") as f: writer = csv.writer(f, lineterminator='\\n') writer.writerows(save_data)", "\"\"\" target_dir : str base directory path of \"dev_data\" or", "test data prefix_normal : str (default=\"normal\") normal directory name prefix_anomaly", "= [] # loop of the base directory for idx,", "[ str ] file list for test test_labels : list", "for test \"\"\" com.logger.info(\"target_dir : {}\".format(target_dir+\"_\"+id_name)) # development if mode:", "sys.exit(-1) model = keras_model.load_model(model_file) model.summary() if mode: # results by", "model.predict(data)), axis=1) y_pred[file_idx] = numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]]) except: com.logger.error(\"file broken!!:", "glob import csv import re import itertools import sys ########################################################################", "test @author <NAME>, <NAME>, and <NAME> (Hitachi Ltd.) 
Copyright (C)", "import default python-library ######################################################################## import os import glob import csv", "ext=ext))) labels = None com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files)", "in machine_id_list: # load test file test_files, y_true = test_file_list_generator(target_dir,", "exist_ok=True) # load base directory dirs = com.select_dirs(param=param, mode=mode) #", "\"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], machine_type=machine_type) # load model file if not os.path.exists(model_file): com.logger.error(\"{}", "tqdm from sklearn import metrics # original lib import common", "__name__ == \"__main__\": # check mode # \"development\": mode ==", "model not found \".format(machine_type)) sys.exit(-1) model = keras_model.load_model(model_file) model.summary() if", "for test @author <NAME>, <NAME>, and <NAME> (Hitachi Ltd.) Copyright", "test_file_list_generator(target_dir, id_name, dir_name=\"test\", prefix_normal=\"normal\", prefix_anomaly=\"anomaly\", ext=\"json\"): \"\"\" target_dir : str", "tqdm import tqdm from sklearn import metrics # original lib", "file list for test test_labels : list [ boolean ]", "{}\".format(auc)) com.logger.info(\"pAUC : {}\".format(p_auc)) print(\"\\n============ END OF TEST FOR A", "# save anomaly score save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list) com.logger.info(\"anomaly score result ->", "or eval_data id_name : str id of wav file in", "(default=\"wav\") file extension of audio files return : if the", "list(averaged_performance)) csv_lines.append([]) if mode: # output results result_path = \"{result}/{file_name}\".format(result=param[\"result_directory\"],", "\"\"\" @file 01_test.py @brief Script for test @author <NAME>, <NAME>,", "python-library ######################################################################## import os import glob import csv import re", ": str (default=\"wav\") file 
extension of audio files return :", "directory containing test data ext : str (default=\"wav) file extension", "to lists auc = metrics.roc_auc_score(y_true, y_pred) p_auc = metrics.roc_auc_score(y_true, y_pred,", "score save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list) com.logger.info(\"anomaly score result -> {}\".format(anomaly_score_csv)) if mode:", "save anomaly score save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list) com.logger.info(\"anomaly score result -> {}\".format(anomaly_score_csv))", "[] print(\"\\n============== BEGIN TEST FOR A MACHINE ID ==============\") y_pred", "0: com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\") # evaluation else: files = sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir,", "######################################################################## if __name__ == \"__main__\": # check mode # \"development\":", "if mode: # calculate averages for AUCs and pAUCs averaged_performance", "0/1 if the mode is \"evaluation\": test_files : list [", "import common as com import keras_model ######################################################################## ######################################################################## # load", "= [] print(\"\\n============== BEGIN TEST FOR A MACHINE ID ==============\")", "by type csv_lines.append([machine_type]) csv_lines.append([\"id\", \"AUC\", \"pAUC\"]) performance = [] machine_id_list", "anomaly score save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list) com.logger.info(\"anomaly score result -> {}\".format(anomaly_score_csv)) if", "set model path model_file = \"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], machine_type=machine_type) # load model", "csv for AUC and pAUC csv_lines = [] # loop", "# set model path model_file = \"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], 
machine_type=machine_type) # load", "path anomaly_score_csv = \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"], machine_type=machine_type, id_str=id_str) anomaly_score_list = []", "# from import from tqdm import tqdm from sklearn import", "y_pred, max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\", 1)[1], auc, p_auc]) performance.append([auc, p_auc]) com.logger.info(\"AUC :", "prefix_normal : str (default=\"normal\") normal directory name prefix_anomaly : str", "com.logger.info(\"AUC : {}\".format(auc)) com.logger.info(\"pAUC : {}\".format(p_auc)) print(\"\\n============ END OF TEST", "com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\")", "<NAME>, <NAME>, and <NAME> (Hitachi Ltd.) Copyright (C) 2020 Hitachi,", "######################################################################## def save_csv(save_file_path, save_data): with open(save_file_path, \"w\", newline=\"\") as f:", "str ] file list for test test_labels : list [", "######################################################################## # load parameter.yaml ######################################################################## param = com.yaml_load() ####################################################################### ########################################################################", "test test_labels : list [ boolean ] label info. 
list", "base directory path of the dev_data or eval_data id_name :", "(default=\"test\") directory containing test data prefix_normal : str (default=\"normal\") normal", "# load test file test_files, y_true = test_file_list_generator(target_dir, id_str) #", "{}\".format(target_dir+\"_\"+id_name)) # development if mode: normal_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name,", "# \"evaluation\": mode == False mode = com.command_line_chk() if mode", "] list of machine IDs extracted from the names of", "lib import common as com import keras_model ######################################################################## ######################################################################## #", "numpy.mean(numpy.array(performance, dtype=float), axis=0) csv_lines.append([\"Average\"] + list(averaged_performance)) csv_lines.append([]) if mode: #", "test \"\"\" com.logger.info(\"target_dir : {}\".format(target_dir+\"_\"+id_name)) # development if mode: normal_files", "= sorted(glob.glob(dir_path)) # extract id machine_id_list = sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]', ext_id)", "for AUC and pAUC csv_lines = [] # loop of", "numpy # from import from tqdm import tqdm from sklearn", "mode: # append AUC and pAUC to lists auc =", "import keras_model ######################################################################## ######################################################################## # load parameter.yaml ######################################################################## param =", "files return : if the mode is \"development\": test_files :", "MACHINE ID ============\") if mode: # calculate averages for AUCs", "com.command_line_chk() if mode is None: sys.exit(-1) # make output result", "score result -> {}\".format(anomaly_score_csv)) if mode: # append AUC and", "{}\".format(p_auc)) print(\"\\n============ END OF TEST FOR A MACHINE ID ============\")", ": 
str (default=\"anomaly\") anomaly directory name ext : str (default=\"wav\")", "anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]]) except: com.logger.error(\"file broken!!: {}\".format(file_path)) # save anomaly score", "######################################################################## ######################################################################## # load parameter.yaml ######################################################################## param = com.yaml_load() #######################################################################", "######################################################################## ######################################################################## # main 01_test.py ######################################################################## if __name__ == \"__main__\":", "= numpy.mean(numpy.array(performance, dtype=float), axis=0) csv_lines.append([\"Average\"] + list(averaged_performance)) csv_lines.append([]) if mode:", "01_test.py ######################################################################## if __name__ == \"__main__\": # check mode #", "else: files = sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, id_name=id_name, ext=ext))) labels =", "None: sys.exit(-1) # make output result directory os.makedirs(param[\"result_directory\"], exist_ok=True) #", "(default=\"normal\") normal directory name prefix_anomaly : str (default=\"anomaly\") anomaly directory", "com.logger.info(\"anomaly score result -> {}\".format(anomaly_score_csv)) if mode: # append AUC", "of the base directory for idx, target_dir in enumerate(dirs): print(\"\\n===========================\")", "pAUC to lists auc = metrics.roc_auc_score(y_true, y_pred) p_auc = metrics.roc_auc_score(y_true,", "len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\") # evaluation else: files =", "sorted( 
glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_normal=prefix_normal, id_name=id_name, ext=ext))) normal_labels = numpy.zeros(len(normal_files)) anomaly_files", "if the mode is \"development\": test_files : list [ str", "id machine_id_list = sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]', ext_id) for ext_id in file_paths]))))", "AUC and pAUC to lists auc = metrics.roc_auc_score(y_true, y_pred) p_auc", "str id of wav file in <<test_dir_name>> directory dir_name :", "anomaly_labels = numpy.ones(len(anomaly_files)) files = numpy.concatenate((normal_files, anomaly_files), axis=0) labels =", "# calculate averages for AUCs and pAUCs averaged_performance = numpy.mean(numpy.array(performance,", "machine_type = os.path.split(target_dir)[1] print(\"============== MODEL LOAD ==============\") # set model", "files \"\"\" # create test files dir_path = os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name,", "] file list for test \"\"\" com.logger.info(\"target_dir : {}\".format(target_dir+\"_\"+id_name)) #", "\"dev_data\" or \"eval_data\" test_dir_name : str (default=\"test\") directory containing test", "######################################################################## import numpy # from import from tqdm import tqdm", "of wav file in <<test_dir_name>> directory dir_name : str (default=\"test\")", "numpy.concatenate((normal_files, anomaly_files), axis=0) labels = numpy.concatenate((normal_labels, anomaly_labels), axis=0) com.logger.info(\"test_file num", "= \"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], machine_type=machine_type) # load model file if not os.path.exists(model_file):", "save_data): with open(save_file_path, \"w\", newline=\"\") as f: writer = csv.writer(f,", "output result directory os.makedirs(param[\"result_directory\"], exist_ok=True) # load base directory dirs", "loop of the base directory for 
idx, target_dir in enumerate(dirs):", "enumerate(dirs): print(\"\\n===========================\") print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir, idx=idx+1, total=len(dirs))) machine_type = os.path.split(target_dir)[1] print(\"==============", ": list [ str ] file list for test \"\"\"", "A MACHINE ID ==============\") y_pred = [0. for k in", ": str id of wav file in <<test_dir_name>> directory dir_name", "frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"]) errors = numpy.mean(numpy.square(data - model.predict(data)), axis=1)", "base directory for idx, target_dir in enumerate(dirs): print(\"\\n===========================\") print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir,", "ext=ext))) normal_labels = numpy.zeros(len(normal_files)) anomaly_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_anomaly=prefix_anomaly,", "print(\"\\n========================================\") # evaluation else: files = sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, id_name=id_name,", "type csv_lines.append([machine_type]) csv_lines.append([\"id\", \"AUC\", \"pAUC\"]) performance = [] machine_id_list =", "\"\"\" com.logger.info(\"target_dir : {}\".format(target_dir+\"_\"+id_name)) # development if mode: normal_files =", "prefix_normal=prefix_normal, id_name=id_name, ext=ext))) normal_labels = numpy.zeros(len(normal_files)) anomaly_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir,", "# output results result_path = \"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"]) com.logger.info(\"AUC and pAUC", "BEGIN TEST FOR A MACHINE ID ==============\") y_pred = [0.", "<gh_stars>1-10 \"\"\" @file 01_test.py @brief Script for test 
@author <NAME>,", "data prefix_normal : str (default=\"normal\") normal directory name prefix_anomaly :", "str (default=\"test\") directory containing test data ext : str (default=\"wav)", "is \"evaluation\": test_files : list [ str ] file list", "machine_type=machine_type, id_str=id_str) anomaly_score_list = [] print(\"\\n============== BEGIN TEST FOR A", "dir_name=\"test\", ext=\"json\"): \"\"\" target_dir : str base directory path of", "csv_lines.append([\"Average\"] + list(averaged_performance)) csv_lines.append([]) if mode: # output results result_path", "01_test.py @brief Script for test @author <NAME>, <NAME>, and <NAME>", "# def ######################################################################## def save_csv(save_file_path, save_data): with open(save_file_path, \"w\", newline=\"\")", "= None com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files) == 0:", "\"development\": mode == True # \"evaluation\": mode == False mode", "files return : machine_id_list : list [ str ] list", "create test files dir_path = os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name, ext=ext)) file_paths =", "== True # \"evaluation\": mode == False mode = com.command_line_chk()", "reserved. 
\"\"\" ######################################################################## # import default python-library ######################################################################## import os", "\"\"\" target_dir : str base directory path of the dev_data", "p_auc]) com.logger.info(\"AUC : {}\".format(auc)) com.logger.info(\"pAUC : {}\".format(p_auc)) print(\"\\n============ END OF", "directory name ext : str (default=\"wav\") file extension of audio", "is \"development\": test_files : list [ str ] file list", "= numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]]) except: com.logger.error(\"file broken!!: {}\".format(file_path)) # save", "= sorted( glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_normal=prefix_normal, id_name=id_name, ext=ext))) normal_labels = numpy.zeros(len(normal_files))", "# import additional python-library ######################################################################## import numpy # from import", "and pAUC csv_lines = [] # loop of the base", "######################################################################## # import default python-library ######################################################################## import os import glob", "extension of audio files return : if the mode is", "initialize lines in csv for AUC and pAUC csv_lines =", "data ext : str (default=\"wav) file extension of audio files", "TEST FOR A MACHINE ID ==============\") y_pred = [0. 
for", "len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\") return files, labels ######################################################################## ########################################################################", "file_paths])))) return machine_id_list def test_file_list_generator(target_dir, id_name, dir_name=\"test\", prefix_normal=\"normal\", prefix_anomaly=\"anomaly\", ext=\"json\"):", "== 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\") # evaluation else: files = sorted(", "print(\"\\n============ END OF TEST FOR A MACHINE ID ============\") if", "FOR A MACHINE ID ==============\") y_pred = [0. for k", "######################################################################## import os import glob import csv import re import", "broken!!: {}\".format(file_path)) # save anomaly score save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list) com.logger.info(\"anomaly score", "lineterminator='\\n') writer.writerows(save_data) def get_machine_id_list_for_test(target_dir, dir_name=\"test\", ext=\"json\"): \"\"\" target_dir : str", "# load parameter.yaml ######################################################################## param = com.yaml_load() ####################################################################### ######################################################################## #", "n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"]) errors = numpy.mean(numpy.square(data - model.predict(data)), axis=1) y_pred[file_idx]", "\"evaluation\": test_files : list [ str ] file list for", "] file list for test test_labels : list [ boolean", "common as com import keras_model ######################################################################## ######################################################################## # load parameter.yaml", "label info. 
list for test * normal/anomaly = 0/1 if", "file test_files, y_true = test_file_list_generator(target_dir, id_str) # setup anomaly score", "boolean ] label info. list for test * normal/anomaly =", "of audio files return : machine_id_list : list [ str", "for idx, target_dir in enumerate(dirs): print(\"\\n===========================\") print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir, idx=idx+1, total=len(dirs)))", "MACHINE ID ==============\") y_pred = [0. for k in test_files]", "print(\"\\n===========================\") print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir, idx=idx+1, total=len(dirs))) machine_type = os.path.split(target_dir)[1] print(\"============== MODEL", "test_files : list [ str ] file list for test", "str ] list of machine IDs extracted from the names", "try: data = com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"]) errors", "ext_id in file_paths])))) return machine_id_list def test_file_list_generator(target_dir, id_name, dir_name=\"test\", prefix_normal=\"normal\",", ": {num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\") # evaluation", "= sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_anomaly=prefix_anomaly, id_name=id_name, ext=ext))) anomaly_labels = numpy.ones(len(anomaly_files))", "with open(save_file_path, \"w\", newline=\"\") as f: writer = csv.writer(f, lineterminator='\\n')", "averaged_performance = numpy.mean(numpy.array(performance, dtype=float), axis=0) csv_lines.append([\"Average\"] + list(averaged_performance)) csv_lines.append([]) if", "path of \"dev_data\" or \"eval_data\" test_dir_name : str (default=\"test\") directory", "itertools import sys 
######################################################################## ######################################################################## # import additional python-library ########################################################################", "numpy.concatenate((normal_labels, anomaly_labels), axis=0) com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files) ==", "mode == False mode = com.command_line_chk() if mode is None:", "files = numpy.concatenate((normal_files, anomaly_files), axis=0) labels = numpy.concatenate((normal_labels, anomaly_labels), axis=0)", "com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\") return files, labels ######################################################################## ######################################################################## # main 01_test.py", "parameter.yaml ######################################################################## param = com.yaml_load() ####################################################################### ######################################################################## # def ########################################################################", "machine_id_list : list [ str ] list of machine IDs", "<NAME>, and <NAME> (Hitachi Ltd.) Copyright (C) 2020 Hitachi, Ltd.", "dtype=float), axis=0) csv_lines.append([\"Average\"] + list(averaged_performance)) csv_lines.append([]) if mode: # output", "save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list) com.logger.info(\"anomaly score result -> {}\".format(anomaly_score_csv)) if mode: #", "Hitachi, Ltd. All right reserved. 
\"\"\" ######################################################################## # import default", "if mode: # output results result_path = \"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"]) com.logger.info(\"AUC", "import numpy # from import from tqdm import tqdm from", "id_name=id_name, ext=ext))) normal_labels = numpy.zeros(len(normal_files)) anomaly_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name,", "as f: writer = csv.writer(f, lineterminator='\\n') writer.writerows(save_data) def get_machine_id_list_for_test(target_dir, dir_name=\"test\",", "original lib import common as com import keras_model ######################################################################## ########################################################################", "test_files, y_true = test_file_list_generator(target_dir, id_str) # setup anomaly score file", "import from tqdm import tqdm from sklearn import metrics #", "model_file = \"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], machine_type=machine_type) # load model file if not", "AUC and pAUC csv_lines = [] # loop of the", "n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"]) errors = numpy.mean(numpy.square(data - model.predict(data)),", "1)[1], auc, p_auc]) performance.append([auc, p_auc]) com.logger.info(\"AUC : {}\".format(auc)) com.logger.info(\"pAUC :", "score file path anomaly_score_csv = \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"], machine_type=machine_type, id_str=id_str) anomaly_score_list", "(default=\"anomaly\") anomaly directory name ext : str (default=\"wav\") file extension", "return files, labels 
######################################################################## ######################################################################## # main 01_test.py ######################################################################## if", "mode == True # \"evaluation\": mode == False mode =", "{}\".format(file_path)) # save anomaly score save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list) com.logger.info(\"anomaly score result", "normal/anomaly = 0/1 if the mode is \"evaluation\": test_files :", "if mode is None: sys.exit(-1) # make output result directory", "the names of test files \"\"\" # create test files", "development if mode: normal_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_normal=prefix_normal, id_name=id_name,", "######################################################################## # main 01_test.py ######################################################################## if __name__ == \"__main__\": #", "mode=mode) # initialize lines in csv for AUC and pAUC", "sys.exit(-1) # make output result directory os.makedirs(param[\"result_directory\"], exist_ok=True) # load", "ext=\"json\"): \"\"\" target_dir : str base directory path of \"dev_data\"", "y_pred[file_idx] = numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]]) except: com.logger.error(\"file broken!!: {}\".format(file_path)) #", "if not os.path.exists(model_file): com.logger.error(\"{} model not found \".format(machine_type)) sys.exit(-1) model", "# load model file if not os.path.exists(model_file): com.logger.error(\"{} model not", "\"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"]) com.logger.info(\"AUC and pAUC results -> {}\".format(result_path)) save_csv(save_file_path=result_path, save_data=csv_lines)", "performance.append([auc, p_auc]) com.logger.info(\"AUC : {}\".format(auc)) 
com.logger.info(\"pAUC : {}\".format(p_auc)) print(\"\\n============ END", "anomaly_labels), axis=0) com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files) == 0:", "print(machine_id_list) for id_str in machine_id_list: # load test file test_files,", "2020 Hitachi, Ltd. All right reserved. \"\"\" ######################################################################## # import", "print(\"\\n=========================================\") return files, labels ######################################################################## ######################################################################## # main 01_test.py ########################################################################", "and pAUCs averaged_performance = numpy.mean(numpy.array(performance, dtype=float), axis=0) csv_lines.append([\"Average\"] + list(averaged_performance))", "# \"development\": mode == True # \"evaluation\": mode == False", "[re.findall('id_[0-9][0-9]', ext_id) for ext_id in file_paths])))) return machine_id_list def test_file_list_generator(target_dir,", "= numpy.mean(numpy.square(data - model.predict(data)), axis=1) y_pred[file_idx] = numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]])", "file_path in tqdm(enumerate(test_files), total=len(test_files)): try: data = com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"],", ": machine_id_list : list [ str ] list of machine", "mode: # output results result_path = \"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"]) com.logger.info(\"AUC and", "dirs = com.select_dirs(param=param, mode=mode) # initialize lines in csv for", "# extract id machine_id_list = sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]', ext_id) for ext_id", "= [0. 
for k in test_files] for file_idx, file_path in", "file_idx, file_path in tqdm(enumerate(test_files), total=len(test_files)): try: data = com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"],", "######################################################################## # import additional python-library ######################################################################## import numpy # from", "the mode is \"development\": test_files : list [ str ]", "if mode: # append AUC and pAUC to lists auc", "get_machine_id_list_for_test(target_dir, dir_name=\"test\", ext=\"json\"): \"\"\" target_dir : str base directory path", "python-library ######################################################################## import numpy # from import from tqdm import", "file_paths = sorted(glob.glob(dir_path)) # extract id machine_id_list = sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]',", "== False mode = com.command_line_chk() if mode is None: sys.exit(-1)", "prefix_anomaly : str (default=\"anomaly\") anomaly directory name ext : str", "prefix_anomaly=prefix_anomaly, id_name=id_name, ext=ext))) anomaly_labels = numpy.ones(len(anomaly_files)) files = numpy.concatenate((normal_files, anomaly_files),", "load model file if not os.path.exists(model_file): com.logger.error(\"{} model not found", "numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]]) except: com.logger.error(\"file broken!!: {}\".format(file_path)) # save anomaly", "str base directory path of \"dev_data\" or \"eval_data\" test_dir_name :", "as com import keras_model ######################################################################## ######################################################################## # load parameter.yaml ########################################################################", "path model_file = \"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], machine_type=machine_type) # load 
model file if", "for test * normal/anomaly = 0/1 if the mode is", ": {}\".format(target_dir+\"_\"+id_name)) # development if mode: normal_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir,", ": str base directory path of \"dev_data\" or \"eval_data\" test_dir_name", "metrics.roc_auc_score(y_true, y_pred, max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\", 1)[1], auc, p_auc]) performance.append([auc, p_auc]) com.logger.info(\"AUC", "for k in test_files] for file_idx, file_path in tqdm(enumerate(test_files), total=len(test_files)):", "base directory path of \"dev_data\" or \"eval_data\" test_dir_name : str", ": str (default=\"test\") directory containing test data ext : str", "csv_lines = [] # loop of the base directory for", "id of wav file in <<test_dir_name>> directory dir_name : str", "ext=ext)) file_paths = sorted(glob.glob(dir_path)) # extract id machine_id_list = sorted(list(set(itertools.chain.from_iterable(", "ID ==============\") y_pred = [0. for k in test_files] for", "for id_str in machine_id_list: # load test file test_files, y_true", "or \"eval_data\" test_dir_name : str (default=\"test\") directory containing test data", "OF TEST FOR A MACHINE ID ============\") if mode: #", "anomaly_score_csv = \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"], machine_type=machine_type, id_str=id_str) anomaly_score_list = [] print(\"\\n==============", "files = sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, id_name=id_name, ext=ext))) labels = None", "test * normal/anomaly = 0/1 if the mode is \"evaluation\":", "info. 
list for test * normal/anomaly = 0/1 if the", "main 01_test.py ######################################################################## if __name__ == \"__main__\": # check mode", "{dirname}\".format(dirname=target_dir, idx=idx+1, total=len(dirs))) machine_type = os.path.split(target_dir)[1] print(\"============== MODEL LOAD ==============\")", "numpy.zeros(len(normal_files)) anomaly_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_anomaly=prefix_anomaly, id_name=id_name, ext=ext))) anomaly_labels", ": {}\".format(auc)) com.logger.info(\"pAUC : {}\".format(p_auc)) print(\"\\n============ END OF TEST FOR", "prefix_anomaly=\"anomaly\", ext=\"json\"): \"\"\" target_dir : str base directory path of", "keras_model.load_model(model_file) model.summary() if mode: # results by type csv_lines.append([machine_type]) csv_lines.append([\"id\",", "file extension of audio files return : if the mode", "os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name, ext=ext)) file_paths = sorted(glob.glob(dir_path)) # extract id machine_id_list", "= [] machine_id_list = get_machine_id_list_for_test(target_dir) print(machine_id_list) for id_str in machine_id_list:", "for file_idx, file_path in tqdm(enumerate(test_files), total=len(test_files)): try: data = com.file_to_vector_array(file_path,", "audio files return : if the mode is \"development\": test_files", "pAUCs averaged_performance = numpy.mean(numpy.array(performance, dtype=float), axis=0) csv_lines.append([\"Average\"] + list(averaged_performance)) csv_lines.append([])", "right reserved. 
\"\"\" ######################################################################## # import default python-library ######################################################################## import", "default python-library ######################################################################## import os import glob import csv import", "\"pAUC\"]) performance = [] machine_id_list = get_machine_id_list_for_test(target_dir) print(machine_id_list) for id_str", "= com.select_dirs(param=param, mode=mode) # initialize lines in csv for AUC", "num : {num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\") return", "= metrics.roc_auc_score(y_true, y_pred) p_auc = metrics.roc_auc_score(y_true, y_pred, max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\", 1)[1],", "tqdm(enumerate(test_files), total=len(test_files)): try: data = com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"],", "idx, target_dir in enumerate(dirs): print(\"\\n===========================\") print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir, idx=idx+1, total=len(dirs))) machine_type", ": str (default=\"test\") directory containing test data prefix_normal : str", "sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]', ext_id) for ext_id in file_paths])))) return machine_id_list def", "\"AUC\", \"pAUC\"]) performance = [] machine_id_list = get_machine_id_list_for_test(target_dir) print(machine_id_list) for", "import re import itertools import sys ######################################################################## ######################################################################## # import", "eval_data id_name : str id of wav file in <<test_dir_name>>", "* normal/anomaly = 0/1 if the mode is \"evaluation\": test_files", "is None: 
sys.exit(-1) # make output result directory os.makedirs(param[\"result_directory\"], exist_ok=True)", "[] # loop of the base directory for idx, target_dir", "file list for test \"\"\" com.logger.info(\"target_dir : {}\".format(target_dir+\"_\"+id_name)) # development", "file if not os.path.exists(model_file): com.logger.error(\"{} model not found \".format(machine_type)) sys.exit(-1)", "str (default=\"test\") directory containing test data prefix_normal : str (default=\"normal\")", "= csv.writer(f, lineterminator='\\n') writer.writerows(save_data) def get_machine_id_list_for_test(target_dir, dir_name=\"test\", ext=\"json\"): \"\"\" target_dir", "for test test_labels : list [ boolean ] label info.", "os import glob import csv import re import itertools import", "\"\"\" ######################################################################## # import default python-library ######################################################################## import os import", "directory path of \"dev_data\" or \"eval_data\" test_dir_name : str (default=\"test\")", "import sys ######################################################################## ######################################################################## # import additional python-library ######################################################################## import", "check mode # \"development\": mode == True # \"evaluation\": mode", "make output result directory os.makedirs(param[\"result_directory\"], exist_ok=True) # load base directory", "lines in csv for AUC and pAUC csv_lines = []", "p_auc = metrics.roc_auc_score(y_true, y_pred, max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\", 1)[1], auc, p_auc]) performance.append([auc,", "= metrics.roc_auc_score(y_true, y_pred, max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\", 1)[1], auc, p_auc]) performance.append([auc, p_auc])", "= os.path.split(target_dir)[1] print(\"============== MODEL LOAD ==============\") # set model path", 
"directory dir_name : str (default=\"test\") directory containing test data prefix_normal", "errors = numpy.mean(numpy.square(data - model.predict(data)), axis=1) y_pred[file_idx] = numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path),", "str base directory path of the dev_data or eval_data id_name", "csv_lines.append([\"id\", \"AUC\", \"pAUC\"]) performance = [] machine_id_list = get_machine_id_list_for_test(target_dir) print(machine_id_list)", "list for test \"\"\" com.logger.info(\"target_dir : {}\".format(target_dir+\"_\"+id_name)) # development if", "False mode = com.command_line_chk() if mode is None: sys.exit(-1) #", "from the names of test files \"\"\" # create test", "files dir_path = os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name, ext=ext)) file_paths = sorted(glob.glob(dir_path)) #", "re import itertools import sys ######################################################################## ######################################################################## # import additional", "id_name : str id of wav file in <<test_dir_name>> directory", "append AUC and pAUC to lists auc = metrics.roc_auc_score(y_true, y_pred)", "id_name, dir_name=\"test\", prefix_normal=\"normal\", prefix_anomaly=\"anomaly\", ext=\"json\"): \"\"\" target_dir : str base", "[0. 
for k in test_files] for file_idx, file_path in tqdm(enumerate(test_files),", "\".format(machine_type)) sys.exit(-1) model = keras_model.load_model(model_file) model.summary() if mode: # results", "of \"dev_data\" or \"eval_data\" test_dir_name : str (default=\"test\") directory containing", "######################################################################## ######################################################################## # import additional python-library ######################################################################## import numpy #", "anomaly score file path anomaly_score_csv = \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"], machine_type=machine_type, id_str=id_str)", "# evaluation else: files = sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, id_name=id_name, ext=ext)))", "normal_labels = numpy.zeros(len(normal_files)) anomaly_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_anomaly=prefix_anomaly, id_name=id_name,", "param = com.yaml_load() ####################################################################### ######################################################################## # def ######################################################################## def save_csv(save_file_path,", "# check mode # \"development\": mode == True # \"evaluation\":", "Ltd. All right reserved. 
\"\"\" ######################################################################## # import default python-library", "= com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"]) errors = numpy.mean(numpy.square(data", "= keras_model.load_model(model_file) model.summary() if mode: # results by type csv_lines.append([machine_type])", "keras_model ######################################################################## ######################################################################## # load parameter.yaml ######################################################################## param = com.yaml_load()", "# import default python-library ######################################################################## import os import glob import", "mode: normal_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_normal=prefix_normal, id_name=id_name, ext=ext))) normal_labels", "id_str=id_str) anomaly_score_list = [] print(\"\\n============== BEGIN TEST FOR A MACHINE", "axis=0) csv_lines.append([\"Average\"] + list(averaged_performance)) csv_lines.append([]) if mode: # output results", "= \"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"]) com.logger.info(\"AUC and pAUC results -> {}\".format(result_path)) save_csv(save_file_path=result_path,", "mode: # calculate averages for AUCs and pAUCs averaged_performance =", "MODEL LOAD ==============\") # set model path model_file = \"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"],", "save_csv(save_file_path, save_data): with open(save_file_path, \"w\", newline=\"\") as f: writer =", "==============\") # set model path model_file = 
\"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], machine_type=machine_type) #", "dir_name=dir_name, ext=ext)) file_paths = sorted(glob.glob(dir_path)) # extract id machine_id_list =", ": str (default=\"wav) file extension of audio files return :", "Script for test @author <NAME>, <NAME>, and <NAME> (Hitachi Ltd.)", "return machine_id_list def test_file_list_generator(target_dir, id_name, dir_name=\"test\", prefix_normal=\"normal\", prefix_anomaly=\"anomaly\", ext=\"json\"): \"\"\"", "sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_anomaly=prefix_anomaly, id_name=id_name, ext=ext))) anomaly_labels = numpy.ones(len(anomaly_files)) files", "power=param[\"feature\"][\"power\"]) errors = numpy.mean(numpy.square(data - model.predict(data)), axis=1) y_pred[file_idx] = numpy.mean(errors)", "load parameter.yaml ######################################################################## param = com.yaml_load() ####################################################################### ######################################################################## # def", ": list [ str ] list of machine IDs extracted", "axis=0) com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\")", "result -> {}\".format(anomaly_score_csv)) if mode: # append AUC and pAUC", "com.logger.info(\"pAUC : {}\".format(p_auc)) print(\"\\n============ END OF TEST FOR A MACHINE", "writer = csv.writer(f, lineterminator='\\n') writer.writerows(save_data) def get_machine_id_list_for_test(target_dir, dir_name=\"test\", ext=\"json\"): \"\"\"", "name prefix_anomaly : str (default=\"anomaly\") anomaly directory name ext :", "list for test test_labels : list [ boolean ] label", "dir_name=dir_name, prefix_normal=prefix_normal, id_name=id_name, ext=ext))) normal_labels = numpy.zeros(len(normal_files)) anomaly_files = sorted(", "in 
tqdm(enumerate(test_files), total=len(test_files)): try: data = com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"],", "anomaly_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_anomaly=prefix_anomaly, id_name=id_name, ext=ext))) anomaly_labels =", "csv_lines.append([machine_type]) csv_lines.append([\"id\", \"AUC\", \"pAUC\"]) performance = [] machine_id_list = get_machine_id_list_for_test(target_dir)", "hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"]) errors = numpy.mean(numpy.square(data - model.predict(data)), axis=1) y_pred[file_idx] =", "result directory os.makedirs(param[\"result_directory\"], exist_ok=True) # load base directory dirs =", "# loop of the base directory for idx, target_dir in", "additional python-library ######################################################################## import numpy # from import from tqdm", "# append AUC and pAUC to lists auc = metrics.roc_auc_score(y_true,", "# setup anomaly score file path anomaly_score_csv = \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"],", "com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\") # evaluation else: files = sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name,", "the mode is \"evaluation\": test_files : list [ str ]", "files, labels ######################################################################## ######################################################################## # main 01_test.py ######################################################################## if __name__", "print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir, idx=idx+1, total=len(dirs))) machine_type = os.path.split(target_dir)[1] print(\"============== MODEL 
LOAD", "dir_name=dir_name, id_name=id_name, ext=ext))) labels = None com.logger.info(\"test_file num : {num}\".format(num=len(files)))", "target_dir : str base directory path of \"dev_data\" or \"eval_data\"", "= \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"], machine_type=machine_type, id_str=id_str) anomaly_score_list = [] print(\"\\n============== BEGIN", "dir_path = os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name, ext=ext)) file_paths = sorted(glob.glob(dir_path)) # extract", "def get_machine_id_list_for_test(target_dir, dir_name=\"test\", ext=\"json\"): \"\"\" target_dir : str base directory", "load base directory dirs = com.select_dirs(param=param, mode=mode) # initialize lines", "Ltd.) Copyright (C) 2020 Hitachi, Ltd. All right reserved. \"\"\"", "test_file_list_generator(target_dir, id_str) # setup anomaly score file path anomaly_score_csv =", "= sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, id_name=id_name, ext=ext))) labels = None com.logger.info(\"test_file", "the dev_data or eval_data id_name : str id of wav", "from import from tqdm import tqdm from sklearn import metrics", ": str (default=\"normal\") normal directory name prefix_anomaly : str (default=\"anomaly\")", "ext : str (default=\"wav) file extension of audio files return", "import additional python-library ######################################################################## import numpy # from import from", ": str base directory path of the dev_data or eval_data", "file path anomaly_score_csv = \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"], machine_type=machine_type, id_str=id_str) anomaly_score_list =", "import metrics # original lib import common as com import", "mode = com.command_line_chk() if mode is None: sys.exit(-1) # make", "labels 
######################################################################## ######################################################################## # main 01_test.py ######################################################################## if __name__ ==", "from tqdm import tqdm from sklearn import metrics # original", "list [ str ] list of machine IDs extracted from", "for ext_id in file_paths])))) return machine_id_list def test_file_list_generator(target_dir, id_name, dir_name=\"test\",", "of test files \"\"\" # create test files dir_path =", "writer.writerows(save_data) def get_machine_id_list_for_test(target_dir, dir_name=\"test\", ext=\"json\"): \"\"\" target_dir : str base", "if mode: # results by type csv_lines.append([machine_type]) csv_lines.append([\"id\", \"AUC\", \"pAUC\"])", "com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"]) errors = numpy.mean(numpy.square(data -", "labels = numpy.concatenate((normal_labels, anomaly_labels), axis=0) com.logger.info(\"test_file num : {num}\".format(num=len(files))) if", "sklearn import metrics # original lib import common as com", "y_pred[file_idx]]) except: com.logger.error(\"file broken!!: {}\".format(file_path)) # save anomaly score save_csv(save_file_path=anomaly_score_csv,", "test_files] for file_idx, file_path in tqdm(enumerate(test_files), total=len(test_files)): try: data =", "csv.writer(f, lineterminator='\\n') writer.writerows(save_data) def get_machine_id_list_for_test(target_dir, dir_name=\"test\", ext=\"json\"): \"\"\" target_dir :", "normal directory name prefix_anomaly : str (default=\"anomaly\") anomaly directory name", "directory containing test data prefix_normal : str (default=\"normal\") normal directory", "com.yaml_load() ####################################################################### 
######################################################################## # def ######################################################################## def save_csv(save_file_path, save_data): with", "name ext : str (default=\"wav\") file extension of audio files", "anomaly_files), axis=0) labels = numpy.concatenate((normal_labels, anomaly_labels), axis=0) com.logger.info(\"test_file num :", "\"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"], machine_type=machine_type, id_str=id_str) anomaly_score_list = [] print(\"\\n============== BEGIN TEST", "+ list(averaged_performance)) csv_lines.append([]) if mode: # output results result_path =", "in enumerate(dirs): print(\"\\n===========================\") print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir, idx=idx+1, total=len(dirs))) machine_type = os.path.split(target_dir)[1]", "if the mode is \"evaluation\": test_files : list [ str", "return : machine_id_list : list [ str ] list of", ": list [ str ] file list for test test_labels", "directory path of the dev_data or eval_data id_name : str", "str (default=\"wav\") file extension of audio files return : if", "extension of audio files return : machine_id_list : list [", "\"__main__\": # check mode # \"development\": mode == True #", "@author <NAME>, <NAME>, and <NAME> (Hitachi Ltd.) 
Copyright (C) 2020", "import csv import re import itertools import sys ######################################################################## ########################################################################", "result_path = \"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"]) com.logger.info(\"AUC and pAUC results -> {}\".format(result_path))", "in file_paths])))) return machine_id_list def test_file_list_generator(target_dir, id_name, dir_name=\"test\", prefix_normal=\"normal\", prefix_anomaly=\"anomaly\",", "return : if the mode is \"development\": test_files : list", "######################################################################## # def ######################################################################## def save_csv(save_file_path, save_data): with open(save_file_path, \"w\",", "data = com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"]) errors =", "= os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name, ext=ext)) file_paths = sorted(glob.glob(dir_path)) # extract id", "averages for AUCs and pAUCs averaged_performance = numpy.mean(numpy.array(performance, dtype=float), axis=0)", "(default=\"test\") directory containing test data ext : str (default=\"wav) file", "machine_type=machine_type) # load model file if not os.path.exists(model_file): com.logger.error(\"{} model", "pAUC csv_lines = [] # loop of the base directory", "k in test_files] for file_idx, file_path in tqdm(enumerate(test_files), total=len(test_files)): try:", "test_dir_name : str (default=\"test\") directory containing test data ext :", "get_machine_id_list_for_test(target_dir) print(machine_id_list) for id_str in machine_id_list: # load test file", "ext_id) for ext_id in file_paths])))) return machine_id_list def 
test_file_list_generator(target_dir, id_name,", "max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\", 1)[1], auc, p_auc]) performance.append([auc, p_auc]) com.logger.info(\"AUC : {}\".format(auc))", "calculate averages for AUCs and pAUCs averaged_performance = numpy.mean(numpy.array(performance, dtype=float),", "test_labels : list [ boolean ] label info. list for", "com.select_dirs(param=param, mode=mode) # initialize lines in csv for AUC and", "y_true = test_file_list_generator(target_dir, id_str) # setup anomaly score file path", "0: com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\") return files, labels ######################################################################## ######################################################################## # main", "csv_lines.append([]) if mode: # output results result_path = \"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"])", "of the dev_data or eval_data id_name : str id of", "prefix_normal=\"normal\", prefix_anomaly=\"anomaly\", ext=\"json\"): \"\"\" target_dir : str base directory path", "newline=\"\") as f: writer = csv.writer(f, lineterminator='\\n') writer.writerows(save_data) def get_machine_id_list_for_test(target_dir,", "found \".format(machine_type)) sys.exit(-1) model = keras_model.load_model(model_file) model.summary() if mode: #", "- model.predict(data)), axis=1) y_pred[file_idx] = numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]]) except: com.logger.error(\"file", "ID ============\") if mode: # calculate averages for AUCs and", "extracted from the names of test files \"\"\" # create", "file extension of audio files return : machine_id_list : list", "{}\".format(anomaly_score_csv)) if mode: # append AUC and pAUC to lists", "model file if not os.path.exists(model_file): com.logger.error(\"{} model not found \".format(machine_type))", 
"numpy.ones(len(anomaly_files)) files = numpy.concatenate((normal_files, anomaly_files), axis=0) labels = numpy.concatenate((normal_labels, anomaly_labels),", "axis=1) y_pred[file_idx] = numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]]) except: com.logger.error(\"file broken!!: {}\".format(file_path))", "glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_normal=prefix_normal, id_name=id_name, ext=ext))) normal_labels = numpy.zeros(len(normal_files)) anomaly_files =", "TEST FOR A MACHINE ID ============\") if mode: # calculate", "list [ str ] file list for test test_labels :", "test file test_files, y_true = test_file_list_generator(target_dir, id_str) # setup anomaly", "except: com.logger.error(\"file broken!!: {}\".format(file_path)) # save anomaly score save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list)", "metrics.roc_auc_score(y_true, y_pred) p_auc = metrics.roc_auc_score(y_true, y_pred, max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\", 1)[1], auc,", "mode is None: sys.exit(-1) # make output result directory os.makedirs(param[\"result_directory\"],", "target_dir in enumerate(dirs): print(\"\\n===========================\") print(\"[{idx}/{total}] {dirname}\".format(dirname=target_dir, idx=idx+1, total=len(dirs))) machine_type =", "test data ext : str (default=\"wav) file extension of audio", "f: writer = csv.writer(f, lineterminator='\\n') writer.writerows(save_data) def get_machine_id_list_for_test(target_dir, dir_name=\"test\", ext=\"json\"):", "results result_path = \"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"]) com.logger.info(\"AUC and pAUC results ->", "y_pred = [0. 
for k in test_files] for file_idx, file_path", "A MACHINE ID ============\") if mode: # calculate averages for", "[] machine_id_list = get_machine_id_list_for_test(target_dir) print(machine_id_list) for id_str in machine_id_list: #", "# load base directory dirs = com.select_dirs(param=param, mode=mode) # initialize", "and pAUC to lists auc = metrics.roc_auc_score(y_true, y_pred) p_auc =", "<<test_dir_name>> directory dir_name : str (default=\"test\") directory containing test data", "directory os.makedirs(param[\"result_directory\"], exist_ok=True) # load base directory dirs = com.select_dirs(param=param,", "path of the dev_data or eval_data id_name : str id", "= sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]', ext_id) for ext_id in file_paths])))) return machine_id_list", "id_str) # setup anomaly score file path anomaly_score_csv = \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format(", "if __name__ == \"__main__\": # check mode # \"development\": mode", "[ str ] list of machine IDs extracted from the", "import tqdm from sklearn import metrics # original lib import", "= com.command_line_chk() if mode is None: sys.exit(-1) # make output", "model = keras_model.load_model(model_file) model.summary() if mode: # results by type", "==============\") y_pred = [0. 
for k in test_files] for file_idx,", "True # \"evaluation\": mode == False mode = com.command_line_chk() if", "sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, id_name=id_name, ext=ext))) labels = None com.logger.info(\"test_file num", "\"\"\" # create test files dir_path = os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name, ext=ext))", "# development if mode: normal_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_normal=prefix_normal,", "glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, id_name=id_name, ext=ext))) labels = None com.logger.info(\"test_file num :", "# create test files dir_path = os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name, ext=ext)) file_paths", "= numpy.concatenate((normal_files, anomaly_files), axis=0) labels = numpy.concatenate((normal_labels, anomaly_labels), axis=0) com.logger.info(\"test_file", "@file 01_test.py @brief Script for test @author <NAME>, <NAME>, and", ": list [ boolean ] label info. 
list for test", "str ] file list for test \"\"\" com.logger.info(\"target_dir : {}\".format(target_dir+\"_\"+id_name))", "\"development\": test_files : list [ str ] file list for", "containing test data prefix_normal : str (default=\"normal\") normal directory name", "= numpy.zeros(len(normal_files)) anomaly_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_anomaly=prefix_anomaly, id_name=id_name, ext=ext)))", "-> {}\".format(anomaly_score_csv)) if mode: # append AUC and pAUC to", "labels = None com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files) ==", "save_data=anomaly_score_list) com.logger.info(\"anomaly score result -> {}\".format(anomaly_score_csv)) if mode: # append", "list of machine IDs extracted from the names of test", "= numpy.ones(len(anomaly_files)) files = numpy.concatenate((normal_files, anomaly_files), axis=0) labels = numpy.concatenate((normal_labels,", "total=len(test_files)): try: data = com.file_to_vector_array(file_path, n_mels=param[\"feature\"][\"n_mels\"], frames=param[\"feature\"][\"frames\"], n_fft=param[\"feature\"][\"n_fft\"], hop_length=param[\"feature\"][\"hop_length\"], power=param[\"feature\"][\"power\"])", "Copyright (C) 2020 Hitachi, Ltd. All right reserved. 
\"\"\" ########################################################################", "str (default=\"anomaly\") anomaly directory name ext : str (default=\"wav\") file", "\"evaluation\": mode == False mode = com.command_line_chk() if mode is", "com.logger.info(\"target_dir : {}\".format(target_dir+\"_\"+id_name)) # development if mode: normal_files = sorted(", "auc, p_auc]) performance.append([auc, p_auc]) com.logger.info(\"AUC : {}\".format(auc)) com.logger.info(\"pAUC : {}\".format(p_auc))", ": if the mode is \"development\": test_files : list [", "{num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\") return files, labels", "id_str in machine_id_list: # load test file test_files, y_true =", "com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\")", "file in <<test_dir_name>> directory dir_name : str (default=\"test\") directory containing", "result=param[\"result_directory\"], machine_type=machine_type, id_str=id_str) anomaly_score_list = [] print(\"\\n============== BEGIN TEST FOR", "p_auc]) performance.append([auc, p_auc]) com.logger.info(\"AUC : {}\".format(auc)) com.logger.info(\"pAUC : {}\".format(p_auc)) print(\"\\n============", "csv import re import itertools import sys ######################################################################## ######################################################################## #", "# original lib import common as com import keras_model ########################################################################", "def save_csv(save_file_path, save_data): with open(save_file_path, \"w\", newline=\"\") as f: writer", "= 0/1 if the mode is \"evaluation\": test_files : list", "END OF TEST FOR A MACHINE ID ============\") if mode:", "mode is \"development\": test_files : list [ str ] file", "evaluation else: files 
= sorted( glob.glob(\"{dir}/{dir_name}/*{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, id_name=id_name, ext=ext))) labels", "test files dir_path = os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir, dir_name=dir_name, ext=ext)) file_paths = sorted(glob.glob(dir_path))", "test files \"\"\" # create test files dir_path = os.path.abspath(\"{dir}/{dir_name}/*.{ext}\".format(dir=target_dir,", "in <<test_dir_name>> directory dir_name : str (default=\"test\") directory containing test", "model path model_file = \"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], machine_type=machine_type) # load model file", "list [ str ] file list for test \"\"\" com.logger.info(\"target_dir", "in csv for AUC and pAUC csv_lines = [] #", "machine_id_list def test_file_list_generator(target_dir, id_name, dir_name=\"test\", prefix_normal=\"normal\", prefix_anomaly=\"anomaly\", ext=\"json\"): \"\"\" target_dir", "print(\"============== MODEL LOAD ==============\") # set model path model_file =", "model.summary() if mode: # results by type csv_lines.append([machine_type]) csv_lines.append([\"id\", \"AUC\",", "of audio files return : if the mode is \"development\":", "mode # \"development\": mode == True # \"evaluation\": mode ==", "results by type csv_lines.append([machine_type]) csv_lines.append([\"id\", \"AUC\", \"pAUC\"]) performance = []", "[ boolean ] label info. 
list for test * normal/anomaly", "id_name=id_name, ext=ext))) labels = None com.logger.info(\"test_file num : {num}\".format(num=len(files))) if", "== 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\") return files, labels ######################################################################## ######################################################################## #", "performance = [] machine_id_list = get_machine_id_list_for_test(target_dir) print(machine_id_list) for id_str in", "idx=idx+1, total=len(dirs))) machine_type = os.path.split(target_dir)[1] print(\"============== MODEL LOAD ==============\") #", "<NAME> (Hitachi Ltd.) Copyright (C) 2020 Hitachi, Ltd. All right", "machine IDs extracted from the names of test files \"\"\"", "if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\") return files, labels ########################################################################", "def ######################################################################## def save_csv(save_file_path, save_data): with open(save_file_path, \"w\", newline=\"\") as", "from sklearn import metrics # original lib import common as", "dir_name=dir_name, prefix_anomaly=prefix_anomaly, id_name=id_name, ext=ext))) anomaly_labels = numpy.ones(len(anomaly_files)) files = numpy.concatenate((normal_files,", "of machine IDs extracted from the names of test files", "= numpy.concatenate((normal_labels, anomaly_labels), axis=0) com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files)", "== \"__main__\": # check mode # \"development\": mode == True", "import glob import csv import re import itertools import sys", "glob.glob(\"{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_anomaly=prefix_anomaly, id_name=id_name, ext=ext))) anomaly_labels = numpy.ones(len(anomaly_files)) files =", "None 
com.logger.info(\"test_file num : {num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\")", "list [ boolean ] label info. list for test *", "list for test * normal/anomaly = 0/1 if the mode", "[ str ] file list for test \"\"\" com.logger.info(\"target_dir :", "ext=ext))) anomaly_labels = numpy.ones(len(anomaly_files)) files = numpy.concatenate((normal_files, anomaly_files), axis=0) labels", "] label info. list for test * normal/anomaly = 0/1", "{num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\") # evaluation else:", "y_pred) p_auc = metrics.roc_auc_score(y_true, y_pred, max_fpr=param[\"max_fpr\"]) csv_lines.append([id_str.split(\"_\", 1)[1], auc, p_auc])", "####################################################################### ######################################################################## # def ######################################################################## def save_csv(save_file_path, save_data): with open(save_file_path,", "str (default=\"normal\") normal directory name prefix_anomaly : str (default=\"anomaly\") anomaly", "anomaly_score_list = [] print(\"\\n============== BEGIN TEST FOR A MACHINE ID", "extract id machine_id_list = sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]', ext_id) for ext_id in", "audio files return : machine_id_list : list [ str ]", "ext=\"json\"): \"\"\" target_dir : str base directory path of the", "if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n========================================\") # evaluation else: files", "ext : str (default=\"wav\") file extension of audio files return", "names of test files \"\"\" # create test files dir_path", "= com.yaml_load() ####################################################################### ######################################################################## # def 
######################################################################## def save_csv(save_file_path, save_data):", "lists auc = metrics.roc_auc_score(y_true, y_pred) p_auc = metrics.roc_auc_score(y_true, y_pred, max_fpr=param[\"max_fpr\"])", "the base directory for idx, target_dir in enumerate(dirs): print(\"\\n===========================\") print(\"[{idx}/{total}]", "os.path.exists(model_file): com.logger.error(\"{} model not found \".format(machine_type)) sys.exit(-1) model = keras_model.load_model(model_file)", "total=len(dirs))) machine_type = os.path.split(target_dir)[1] print(\"============== MODEL LOAD ==============\") # set", "# results by type csv_lines.append([machine_type]) csv_lines.append([\"id\", \"AUC\", \"pAUC\"]) performance =", "directory dirs = com.select_dirs(param=param, mode=mode) # initialize lines in csv", "dir_name=\"test\", prefix_normal=\"normal\", prefix_anomaly=\"anomaly\", ext=\"json\"): \"\"\" target_dir : str base directory", "not found \".format(machine_type)) sys.exit(-1) model = keras_model.load_model(model_file) model.summary() if mode:", "@brief Script for test @author <NAME>, <NAME>, and <NAME> (Hitachi", "metrics # original lib import common as com import keras_model", "FOR A MACHINE ID ============\") if mode: # calculate averages", "str (default=\"wav) file extension of audio files return : machine_id_list", "and <NAME> (Hitachi Ltd.) Copyright (C) 2020 Hitachi, Ltd. 
All", "os.makedirs(param[\"result_directory\"], exist_ok=True) # load base directory dirs = com.select_dirs(param=param, mode=mode)", "base directory dirs = com.select_dirs(param=param, mode=mode) # initialize lines in", "######################################################################## param = com.yaml_load() ####################################################################### ######################################################################## # def ######################################################################## def", "\"eval_data\" test_dir_name : str (default=\"test\") directory containing test data ext", "setup anomaly score file path anomaly_score_csv = \"{result}/anomaly_score_{machine_type}_{id_str}.csv\".format( result=param[\"result_directory\"], machine_type=machine_type,", "csv_lines.append([id_str.split(\"_\", 1)[1], auc, p_auc]) performance.append([auc, p_auc]) com.logger.info(\"AUC : {}\".format(auc)) com.logger.info(\"pAUC", "machine_id_list = get_machine_id_list_for_test(target_dir) print(machine_id_list) for id_str in machine_id_list: # load", ": {num}\".format(num=len(files))) if len(files) == 0: com.logger.exception(\"no_wav_file!!\") print(\"\\n=========================================\") return files,", ": {}\".format(p_auc)) print(\"\\n============ END OF TEST FOR A MACHINE ID", "machine_id_list: # load test file test_files, y_true = test_file_list_generator(target_dir, id_str)", "print(\"\\n============== BEGIN TEST FOR A MACHINE ID ==============\") y_pred =", "import os import glob import csv import re import itertools", "(default=\"wav) file extension of audio files return : machine_id_list :", "(Hitachi Ltd.) Copyright (C) 2020 Hitachi, Ltd. 
All right reserved.", "machine_id_list = sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]', ext_id) for ext_id in file_paths])))) return", "anomaly directory name ext : str (default=\"wav\") file extension of", "axis=0) labels = numpy.concatenate((normal_labels, anomaly_labels), axis=0) com.logger.info(\"test_file num : {num}\".format(num=len(files)))", "# make output result directory os.makedirs(param[\"result_directory\"], exist_ok=True) # load base", "com.logger.error(\"{} model not found \".format(machine_type)) sys.exit(-1) model = keras_model.load_model(model_file) model.summary()", "mode: # results by type csv_lines.append([machine_type]) csv_lines.append([\"id\", \"AUC\", \"pAUC\"]) performance", "wav file in <<test_dir_name>> directory dir_name : str (default=\"test\") directory", "def test_file_list_generator(target_dir, id_name, dir_name=\"test\", prefix_normal=\"normal\", prefix_anomaly=\"anomaly\", ext=\"json\"): \"\"\" target_dir :", "LOAD ==============\") # set model path model_file = \"{model}/model_{machine_type}.hdf5\".format(model=param[\"model_directory\"], machine_type=machine_type)", "(C) 2020 Hitachi, Ltd. All right reserved. 
\"\"\" ######################################################################## #", "IDs extracted from the names of test files \"\"\" #", "output results result_path = \"{result}/{file_name}\".format(result=param[\"result_directory\"], file_name=param[\"result_file\"]) com.logger.info(\"AUC and pAUC results", "containing test data ext : str (default=\"wav) file extension of", "sorted(glob.glob(dir_path)) # extract id machine_id_list = sorted(list(set(itertools.chain.from_iterable( [re.findall('id_[0-9][0-9]', ext_id) for", "dir_name : str (default=\"test\") directory containing test data prefix_normal :", "numpy.mean(numpy.square(data - model.predict(data)), axis=1) y_pred[file_idx] = numpy.mean(errors) anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]]) except:", "com.logger.error(\"file broken!!: {}\".format(file_path)) # save anomaly score save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list) com.logger.info(\"anomaly", "\"w\", newline=\"\") as f: writer = csv.writer(f, lineterminator='\\n') writer.writerows(save_data) def", "if mode: normal_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_normal=prefix_normal, id_name=id_name, ext=ext)))", "id_name=id_name, ext=ext))) anomaly_labels = numpy.ones(len(anomaly_files)) files = numpy.concatenate((normal_files, anomaly_files), axis=0)", "load test file test_files, y_true = test_file_list_generator(target_dir, id_str) # setup", "# initialize lines in csv for AUC and pAUC csv_lines", "target_dir : str base directory path of the dev_data or", "normal_files = sorted( glob.glob(\"{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}\".format(dir=target_dir, dir_name=dir_name, prefix_normal=prefix_normal, id_name=id_name, ext=ext))) normal_labels =" ]
[ "= argparse.ArgumentParser(description=\"a simple parser\") parser.add_argument(\"filename\", type=str) parser.add_argument(\"lineno\", nargs=\"+\", type=int) parser.add_argument(\"--same_length\",", "= os.path.splitext(filename) for i, result in enumerate(results): with open(f\"{name}-{i +", "parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction) def main(): args = parser.parse_args() filename = args.filename", "same_length = args.same_length linenos = list(map(lambda x: x - 1,", "= args.lineno same_length = args.same_length linenos = list(map(lambda x: x", "1 if linenos[0] else 100000 start = 0 while start", "1, linenos)) linenos.sort() results = [] with open(filename, \"r\", encoding=\"utf-8\")", "0 for lineno in linenos: results.append(\"\".join(content[start:lineno])) start = lineno results.append(\"\".join(content[start:]))", "while start < len(content): results.append(\"\".join(content[start: start + lineno])) start +=", "same_length: start = 0 for lineno in linenos: results.append(\"\".join(content[start:lineno])) start", "in enumerate(results): with open(f\"{name}-{i + 1:02}{ext}\", \"w\", encoding=\"utf-8\") as f:", "\"r\", encoding=\"utf-8\") as f: content = f.readlines() if not same_length:", "else: lineno = linenos[0] + 1 if linenos[0] else 100000", "results.append(\"\".join(content[start: start + lineno])) start += lineno name, ext =", "f.readlines() if not same_length: start = 0 for lineno in", "i, result in enumerate(results): with open(f\"{name}-{i + 1:02}{ext}\", \"w\", encoding=\"utf-8\")", "<reponame>i1123581321/word_split import argparse import os parser = argparse.ArgumentParser(description=\"a simple parser\")", "= parser.parse_args() filename = args.filename linenos = args.lineno same_length =", "[] with open(filename, \"r\", encoding=\"utf-8\") as f: content = f.readlines()", "with open(filename, \"r\", encoding=\"utf-8\") as f: content = f.readlines() if", "100000 start = 0 while start < len(content): 
results.append(\"\".join(content[start: start", "+ 1 if linenos[0] else 100000 start = 0 while", "enumerate(results): with open(f\"{name}-{i + 1:02}{ext}\", \"w\", encoding=\"utf-8\") as f: f.write(result)", "start < len(content): results.append(\"\".join(content[start: start + lineno])) start += lineno", "linenos = args.lineno same_length = args.same_length linenos = list(map(lambda x:", "linenos: results.append(\"\".join(content[start:lineno])) start = lineno results.append(\"\".join(content[start:])) else: lineno = linenos[0]", "start + lineno])) start += lineno name, ext = os.path.splitext(filename)", "def main(): args = parser.parse_args() filename = args.filename linenos =", "argparse import os parser = argparse.ArgumentParser(description=\"a simple parser\") parser.add_argument(\"filename\", type=str)", "args.filename linenos = args.lineno same_length = args.same_length linenos = list(map(lambda", "+ lineno])) start += lineno name, ext = os.path.splitext(filename) for", "0 while start < len(content): results.append(\"\".join(content[start: start + lineno])) start", "= 0 while start < len(content): results.append(\"\".join(content[start: start + lineno]))", "linenos[0] else 100000 start = 0 while start < len(content):", "parser.add_argument(\"lineno\", nargs=\"+\", type=int) parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction) def main(): args = parser.parse_args()", "start = 0 while start < len(content): results.append(\"\".join(content[start: start +", "= lineno results.append(\"\".join(content[start:])) else: lineno = linenos[0] + 1 if", "lineno = linenos[0] + 1 if linenos[0] else 100000 start", "lineno results.append(\"\".join(content[start:])) else: lineno = linenos[0] + 1 if linenos[0]", "type=str) parser.add_argument(\"lineno\", nargs=\"+\", type=int) parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction) def main(): args =", "args.lineno same_length = args.same_length linenos = list(map(lambda x: x -", 
"x - 1, linenos)) linenos.sort() results = [] with open(filename,", "as f: content = f.readlines() if not same_length: start =", "args.same_length linenos = list(map(lambda x: x - 1, linenos)) linenos.sort()", "linenos = list(map(lambda x: x - 1, linenos)) linenos.sort() results", "main(): args = parser.parse_args() filename = args.filename linenos = args.lineno", "= 0 for lineno in linenos: results.append(\"\".join(content[start:lineno])) start = lineno", "parser\") parser.add_argument(\"filename\", type=str) parser.add_argument(\"lineno\", nargs=\"+\", type=int) parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction) def main():", "for lineno in linenos: results.append(\"\".join(content[start:lineno])) start = lineno results.append(\"\".join(content[start:])) else:", "import argparse import os parser = argparse.ArgumentParser(description=\"a simple parser\") parser.add_argument(\"filename\",", "- 1, linenos)) linenos.sort() results = [] with open(filename, \"r\",", "if linenos[0] else 100000 start = 0 while start <", "lineno name, ext = os.path.splitext(filename) for i, result in enumerate(results):", "list(map(lambda x: x - 1, linenos)) linenos.sort() results = []", "start += lineno name, ext = os.path.splitext(filename) for i, result", "parser.parse_args() filename = args.filename linenos = args.lineno same_length = args.same_length", "linenos.sort() results = [] with open(filename, \"r\", encoding=\"utf-8\") as f:", "action=argparse.BooleanOptionalAction) def main(): args = parser.parse_args() filename = args.filename linenos", "= args.filename linenos = args.lineno same_length = args.same_length linenos =", "type=int) parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction) def main(): args = parser.parse_args() filename =", "linenos[0] + 1 if linenos[0] else 100000 start = 0", "not same_length: start = 0 for lineno in linenos: results.append(\"\".join(content[start:lineno]))", "lineno in linenos: 
results.append(\"\".join(content[start:lineno])) start = lineno results.append(\"\".join(content[start:])) else: lineno", "result in enumerate(results): with open(f\"{name}-{i + 1:02}{ext}\", \"w\", encoding=\"utf-8\") as", "nargs=\"+\", type=int) parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction) def main(): args = parser.parse_args() filename", "< len(content): results.append(\"\".join(content[start: start + lineno])) start += lineno name,", "+= lineno name, ext = os.path.splitext(filename) for i, result in", "name, ext = os.path.splitext(filename) for i, result in enumerate(results): with", "args = parser.parse_args() filename = args.filename linenos = args.lineno same_length", "lineno])) start += lineno name, ext = os.path.splitext(filename) for i,", "ext = os.path.splitext(filename) for i, result in enumerate(results): with open(f\"{name}-{i", "parser.add_argument(\"filename\", type=str) parser.add_argument(\"lineno\", nargs=\"+\", type=int) parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction) def main(): args", "x: x - 1, linenos)) linenos.sort() results = [] with", "import os parser = argparse.ArgumentParser(description=\"a simple parser\") parser.add_argument(\"filename\", type=str) parser.add_argument(\"lineno\",", "start = 0 for lineno in linenos: results.append(\"\".join(content[start:lineno])) start =", "= linenos[0] + 1 if linenos[0] else 100000 start =", "f: content = f.readlines() if not same_length: start = 0", "content = f.readlines() if not same_length: start = 0 for", "start = lineno results.append(\"\".join(content[start:])) else: lineno = linenos[0] + 1", "= list(map(lambda x: x - 1, linenos)) linenos.sort() results =", "results = [] with open(filename, \"r\", encoding=\"utf-8\") as f: content", "simple parser\") parser.add_argument(\"filename\", type=str) parser.add_argument(\"lineno\", nargs=\"+\", type=int) parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction) def", 
"= [] with open(filename, \"r\", encoding=\"utf-8\") as f: content =", "parser = argparse.ArgumentParser(description=\"a simple parser\") parser.add_argument(\"filename\", type=str) parser.add_argument(\"lineno\", nargs=\"+\", type=int)", "argparse.ArgumentParser(description=\"a simple parser\") parser.add_argument(\"filename\", type=str) parser.add_argument(\"lineno\", nargs=\"+\", type=int) parser.add_argument(\"--same_length\", action=argparse.BooleanOptionalAction)", "filename = args.filename linenos = args.lineno same_length = args.same_length linenos", "os parser = argparse.ArgumentParser(description=\"a simple parser\") parser.add_argument(\"filename\", type=str) parser.add_argument(\"lineno\", nargs=\"+\",", "results.append(\"\".join(content[start:])) else: lineno = linenos[0] + 1 if linenos[0] else", "results.append(\"\".join(content[start:lineno])) start = lineno results.append(\"\".join(content[start:])) else: lineno = linenos[0] +", "in linenos: results.append(\"\".join(content[start:lineno])) start = lineno results.append(\"\".join(content[start:])) else: lineno =", "encoding=\"utf-8\") as f: content = f.readlines() if not same_length: start", "len(content): results.append(\"\".join(content[start: start + lineno])) start += lineno name, ext", "if not same_length: start = 0 for lineno in linenos:", "= f.readlines() if not same_length: start = 0 for lineno", "os.path.splitext(filename) for i, result in enumerate(results): with open(f\"{name}-{i + 1:02}{ext}\",", "= args.same_length linenos = list(map(lambda x: x - 1, linenos))", "linenos)) linenos.sort() results = [] with open(filename, \"r\", encoding=\"utf-8\") as", "else 100000 start = 0 while start < len(content): results.append(\"\".join(content[start:", "open(filename, \"r\", encoding=\"utf-8\") as f: content = f.readlines() if not", "for i, result in enumerate(results): with open(f\"{name}-{i + 1:02}{ext}\", \"w\"," ]
[ "if name.startswith(\"tag:\"): name = name[4:] value = self.event.get_tag(self.tag_aliases.get(name, name)) if", "self.dist data[\"platform\"] = self.platform data[\"message\"] = self.real_message data[\"datetime\"] = self.datetime", "on the way in and we can just return #", "enough information to render a user badge. \"\"\" return self.get_interface(\"user\")", "def __init__(self, snuba_values): \"\"\" When initializing a SnubaEvent, think about", "@property def real_message(self): # XXX(mitsuhiko): this is a transitional attribute", "return self.snuba_data[\"type\"] return super(SnubaEvent, self).get_event_type() @property def ip_address(self): if \"ip_address\"", "be stripped out in __getstate__. @property def group(self): from sentry.models", "TODO(mitsuhiko): remove this code once it's unused. It's still #", "bug with unsaved # models. But the current _group_cache thing", "import truncatechars class EventDict(CanonicalKeyDict): \"\"\" Creating an instance of this", "This is a readonly event and does not support event", "this event. See ``sentry.eventtypes``. \"\"\" # For some inexplicable reason", "nodestore lookup) you may as well just initialize the event", "information if available. 
Grouping hashes will take into account fingerprinting", "interfaces(self): return self.get_interfaces() def get_interface(self, name): return self.interfaces.get(name) def get_legacy_message(self):", "\"message\") or \"\" ) @property def organization(self): return self.project.organization @property", "\"group_id\") def __getstate__(self): state = Model.__getstate__(self) # do not pickle", "normalize_stacktraces: normalize_stacktraces_for_grouping(self.data, config) return get_grouping_variants_for_event(self, config) def get_primary_hash(self): # TODO:", "return self.snuba_data[\"ip_address\"] return super(SnubaEvent, self).ip_address @property def title(self): if \"title\"", "this for snuba et = eventtypes.get(self.get_event_type())() return et.get_location(self.get_event_metadata()) @property def", "anyway, we may as well only fetch the minimum from", "self.title error.short_description = _(\"error\") @property def message_short(self): warnings.warn(\"Event.message_short is deprecated,", "as parse_date from django.db import models from django.utils import timezone", "we only have project_id and event_id. \"\"\" return md5(\"{}:{}\".format(project_id, event_id)).hexdigest()", "self.tags def get_tag(self, key): for t, v in self.get_tags(): if", "id when we only have project_id and event_id. 
\"\"\" return", "instead.\", DeprecationWarning) return self.get_tag(\"site\") @property def server_name(self): warnings.warn( \"Event.server_name is", "v) for t, v in get_path(self.data, \"tags\", filter=True) or ()", "level(self): # we might want to move to this: #", "if not hasattr(self, \"_environment_cache\"): self._environment_cache = Environment.objects.get( organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")), )", "def size(self): return len(json.dumps(dict(self.data))) @property def transaction(self): return self.get_tag(\"transaction\") def", "SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"] ) self.data = NodeData(None, node_id, data=None, wrapper=EventDict)", "self).ip_address @property def title(self): if \"title\" in self.snuba_data: return self.snuba_data[\"title\"]", "a dictionary. If `normalize_stacktraces` is set to `True` then the", "if self.group_id: return self.data.get(\"culprit\") or self.group.culprit return self.data.get(\"culprit\") @property def", "just return # it, otherwise we defer to EventCommon implementation.", "using the same generated id when we only have project_id", "generating new ones from the data. We can only use", "[] # For compatibility, still used by plugins. def get_tags(self):", "in which case it's merged with the stored or #", "and location with dynamically generated data data[\"title\"] = self.title data[\"location\"]", "title(self): # also see event_manager.py which inserts this for snuba", "not a property? warnings.warn(\"Event.error is deprecated, use Event.title\", DeprecationWarning) return", "ref_version=2, wrapper=EventDict, ) objects = EventManager() class Meta: app_label =", "snuba self.snuba_data = snuba_values # self.data is a (lazy) dict", "a CanonicalKeyView which old sentry workers do not know #", "useful columns we can get from snuba. selected_columns = minimal_columns", "data. 
# ============================================ @property def tags(self): \"\"\" Override of tags", "all timestamps from snuba are UTC. return parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property def", "sentry.db.models.manager import EventManager from sentry.interfaces.base import get_interfaces from sentry.utils import", "sorted(zip(keys, values)) else: return [] else: return super(SnubaEvent, self).tags def", "globally unique. The event body should be saved under this", "class SnubaEvent(EventCommon): \"\"\" An event backed by data stored in", "( isinstance(data, NodeData) and isinstance(data.data, EventDict) ) if not skip_renormalization", "if ip_address: return ip_address remote_addr = get_path(self.data, \"request\", \"env\", \"REMOTE_ADDR\")", "self.title class SnubaEvent(EventCommon): \"\"\" An event backed by data stored", "\"dist\": \"sentry:dist\", \"user\": \"sentry:user\"} def __init__(self, event): self.event = event", "self).get_event_type() @property def ip_address(self): if \"ip_address\" in self.snuba_data: return self.snuba_data[\"ip_address\"]", "hashes will take into account fingerprinting and checksums. \"\"\" #", "# Because a snuba event will never have a django", "\"datetime\"),) __repr__ = sane_repr(\"project_id\", \"group_id\") def __getstate__(self): state = Model.__getstate__(self)", "know # about state.pop(\"_project_cache\", None) state.pop(\"_environment_cache\", None) state.pop(\"_group_cache\", None) state.pop(\"interfaces\",", "] ) return rv except ValueError: # at one point", "return sorted(zip(keys, values)) else: return [] else: return super(SnubaEvent, self).tags", "\"username\": self.username, \"ip_address\": self.ip_address, } ) # If the data", "# properties need to be stripped out in __getstate__. 
@property", "self.data = NodeData(None, node_id, data=None, wrapper=EventDict) def __getattr__(self, name): \"\"\"", "from sentry.models import Project if not hasattr(self, \"_project_cache\"): self._project_cache =", "[] else: return super(SnubaEvent, self).tags def get_minimal_user(self): from sentry.interfaces.user import", "\"group_id\") def __init__(self, snuba_values): \"\"\" When initializing a SnubaEvent, think", "could return what currently is real_message. return ( get_path(self.data, \"logentry\",", "r\"(tag:)?[_a-z][_a-z0-9]*\" class EventSubjectTemplateData(object): tag_aliases = {\"release\": \"sentry:release\", \"dist\": \"sentry:dist\", \"user\":", "available. Grouping hashes will take into account fingerprinting and checksums.", "return self.get_tag(\"transaction\") def get_email_subject(self): template = self.project.get_option(\"mail:subject_template\") if template: template", "is deprecated, use Event.title\", DeprecationWarning) return self.title class SnubaEvent(EventCommon): \"\"\"", "OrderedDict from dateutil.parser import parse as parse_date from django.db import", "return self.get_tag(\"site\") @property def server_name(self): warnings.warn( \"Event.server_name is deprecated. 
Use", "return ip_address remote_addr = get_path(self.data, \"request\", \"env\", \"REMOTE_ADDR\") if remote_addr:", "# Snuba implementations of the django fields on Event #", "from the snuba timestamp \"\"\" # dateutil seems to use", "\"projectID\": return self.event.project.slug elif name == \"shortID\" and self.event.group_id: return", "`message` will be renamed to `search_message` and this # will", "idpattern = r\"(tag:)?[_a-z][_a-z0-9]*\" class EventSubjectTemplateData(object): tag_aliases = {\"release\": \"sentry:release\", \"dist\":", "= eventtypes.get(self.get_event_type())() return et.get_location(self.get_event_metadata()) @property def real_message(self): # XXX(mitsuhiko): this", "return self.event.project.slug elif name == \"shortID\" and self.event.group_id: return self.event.group.qualified_short_id", "this code once it's unused. It's still # being used", "def release(self): return self.get_tag(\"sentry:release\") @property def dist(self): return self.get_tag(\"sentry:dist\") def", "Returns the calculated hashes for the event. This uses the", "not hasattr(self, \"_project_cache\"): self._project_cache = Project.objects.get(id=self.project_id) return self._project_cache @project.setter def", "__getattr__(self, name): \"\"\" Depending on what snuba data this event", "`SnubaEvent.selected_colums` (or a subset depending on your needs) But if", "warnings.warn(\"Event.message_short is deprecated, use Event.title\", DeprecationWarning) return self.title class SnubaEvent(EventCommon):", "\"message\" in self.snuba_data: return self.snuba_data[\"message\"] return self.data.get(\"message\") @property def platform(self):", "stored in postgres. \"\"\" __core__ = False group_id = BoundedBigIntegerField(blank=True,", "return [] else: return super(SnubaEvent, self).tags def get_minimal_user(self): from sentry.interfaces.user", "protected from an IndexError? return self.get_hashes()[0] @property def title(self): #", "# is completely empty. 
In that case we want to", "Project if not hasattr(self, \"_project_cache\"): self._project_cache = Project.objects.get(id=self.project_id) return self._project_cache", "else: return None def get_level_display(self): # we might want to", "will send the event through basic (Rust-based) type/schema validation called", "the stored or # default config dictionary if force_config is", "self.get_tag(\"server_name\") @property def checksum(self): warnings.warn(\"Event.checksum is no longer used\", DeprecationWarning)", "is None: raise KeyError return six.text_type(value) elif name == \"project\":", "not hasattr(self, \"_environment_cache\"): self._environment_cache = Environment.objects.get( organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")), ) return", "tag deletions without having to rewrite nodestore blobs. \"\"\" if", "size(self): return len(json.dumps(dict(self.data))) @property def transaction(self): return self.get_tag(\"transaction\") def get_email_subject(self):", "# pickle a CanonicalKeyView which old sentry workers do not", "import get_grouping_config_dict_for_event_data return get_grouping_config_dict_for_event_data(self.data, self.project) def get_hashes(self, force_config=None): \"\"\" Returns", "in self.snuba_data: return self.snuba_data[\"location\"] return super(SnubaEvent, self).location # ==================================================== #", "not save the culprit if self.group_id: return self.data.get(\"culprit\") or self.group.culprit", "load_grouping_config from sentry.stacktraces.processing import normalize_stacktraces_for_grouping # Forcing configs has two", "self.snuba_data[name] else: return self.data[name] # ============================================ # Snuba-only implementations of", "attributes come from nodestore. 
\"\"\" assert all(k in snuba_values for", "Override title and location with dynamically generated data data[\"title\"] =", "because we know # all timestamps from snuba are UTC.", "\"\"\" # dateutil seems to use tzlocal() instead of UTC", "further. return self.data.get(\"metadata\") or {} def get_grouping_config(self): \"\"\"Returns the event", "instance of this dictionary will send the event through basic", "is a dict of all the stuff we got from", "None and v is not None ] ) return rv", "anyway (which requires a nodestore lookup) you may as well", "] __repr__ = sane_repr(\"project_id\", \"group_id\") def __init__(self, snuba_values): \"\"\" When", "CanonicalKeyView from sentry.utils.safe import get_path from sentry.utils.strings import truncatechars class", "# this is None the `get_grouping_variants_for_event` will fill in #", "stored_config = self.get_grouping_config() config = dict(stored_config) config[\"id\"] = force_config else:", "sentry.utils import json from sentry.utils.cache import memoize from sentry.utils.canonical import", "template = self.project.get_option(\"mail:subject_template\") if template: template = EventSubjectTemplate(template) else: template", "retrieved using the same generated id when we only have", "if template: template = EventSubjectTemplate(template) else: template = DEFAULT_SUBJECT_TEMPLATE return", "data = normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self, data, **kwargs) class EventCommon(object): \"\"\" Methods", "no longer used\", DeprecationWarning) return \"\" def error(self): # TODO", "or () if t is not None and v is", "db_column=\"message_id\") project_id = BoundedBigIntegerField(blank=True, null=True) message = models.TextField() platform =", "return None def get_level_display(self): # we might want to move", "(Rust-based) type/schema validation called \"re-normalization\". 
This is used as a", "return self.get_tag(\"sentry:release\") @property def dist(self): return self.get_tag(\"sentry:dist\") def get_raw_data(self): \"\"\"Returns", "OrderedDict to keep elements ordered for a potential JSON serializer", "TODO (alex) We need a better way to cache these", "this dictionary will send the event through basic (Rust-based) type/schema", "was initialized with, we may have the data available to", "In those cases put # the culprit in from the", "new ones from the data. We can only use #", "to `get_hashes` but will instead return the grouping components for", "backed by data stored in snuba. This is a readonly", ") from sentry.db.models.manager import EventManager from sentry.interfaces.base import get_interfaces from", "dict of everything we got from nodestore node_id = SnubaEvent.generate_node_id(", "nodestore. \"\"\" assert all(k in snuba_values for k in SnubaEvent.minimal_columns)", "provide snuba-only 'user' interface \"email\", \"ip_address\", \"user_id\", \"username\", ] __repr__", "this method could return what currently is real_message. return (", "foo), (tag, bar)) return [] # For compatibility, still used", "fetched from nodestore and bound to the data property in", "\"\"\" A minimal 'User' interface object that gives us enough", "Event. \"\"\" # The minimal list of columns we need", "config as stored. if # this is None the `get_grouping_variants_for_event`", "the type schema. \"\"\" def __init__(self, data, skip_renormalization=False, **kwargs): is_renormalized", "Snuba-only implementations of properties that # would otherwise require nodestore", "return filter( None, [x.get_hash() for x in self.get_grouping_variants(force_config).values()] ) def", "nodestore node_id = SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"] ) self.data = NodeData(None,", "to render a user badge. 
\"\"\" return self.get_interface(\"user\") def as_dict(self):", "event grouping config.\"\"\" from sentry.grouping.api import get_grouping_config_dict_for_event_data return get_grouping_config_dict_for_event_data(self.data, self.project)", "\"\"\" if name in (\"_project_cache\", \"_group_cache\", \"_environment_cache\"): raise AttributeError() if", "available from snuba, we assume # it was already normalized", "# ends with '+00:00', so just replace the TZ with", "in self.tags] for k, v in sorted(six.iteritems(self.data)): if k in", "node_id, data=None, wrapper=EventDict) def __getattr__(self, name): \"\"\" Depending on what", "instead swithc to the actual message attribute or # this", "use them, otherwise we # fall back to generating new", "DB) will ensure the data fits the type schema. \"\"\"", "to keep elements ordered for a potential JSON serializer data", "defer to EventCommon implementation. def get_event_type(self): if \"type\" in self.snuba_data:", "\"culprit\", \"location\", \"message\", \"platform\", \"title\", \"type\", # Required to provide", "the `data` dict (which would force a nodestore load). All", "with, we may have the data available to return, or", "this event based on the project_id and event_id which together", "get_grouping_config(self): \"\"\"Returns the event grouping config.\"\"\" from sentry.grouping.api import get_grouping_config_dict_for_event_data", "for snuba et = eventtypes.get(self.get_event_type())() return et.get_title(self.get_event_metadata()) @property def culprit(self):", "project_id = BoundedBigIntegerField(blank=True, null=True) message = models.TextField() platform = models.CharField(max_length=64,", "self.snuba_data[\"message\"] return self.data.get(\"message\") @property def platform(self): if \"platform\" in self.snuba_data:", "def version(self): return self.data.get(\"version\", \"5\") @property def ip_address(self): ip_address =", "duplicated work. 
minimal_columns = [\"event_id\", \"group_id\", \"project_id\", \"timestamp\"] # A", "ID is given in which case it's merged with the", "return self.group.level else: return None def get_level_display(self): # we might", "about the attributes you might need to access on it.", "use a OrderedDict to keep elements ordered for a potential", "db_index=True) time_spent = BoundedIntegerField(null=True) data = NodeField( blank=True, null=True, ref_func=lambda", "@property def server_name(self): warnings.warn( \"Event.server_name is deprecated. Use Event.tags instead.\",", "an instance of this dictionary will send the event through", "\"sentry\" db_table = \"sentry_message\" verbose_name = _(\"message\") verbose_name_plural = _(\"messages\")", "1)[-1], v) for (k, v) in self.tags] for k, v", "verbose_name = _(\"message\") verbose_name_plural = _(\"messages\") unique_together = ((\"project_id\", \"event_id\"),)", "self.group: return self.group.level else: return None def get_level_display(self): # we", "= name[4:] value = self.event.get_tag(self.tag_aliases.get(name, name)) if value is None:", "`get_grouping_variants_for_event` will fill in # the default. else: config =", "that function the event data has been modified in place.", "merged with the stored or # default config dictionary if", "DeprecationWarning) return self.title class SnubaEvent(EventCommon): \"\"\" An event backed by", "else: return self.data[name] # ============================================ # Snuba-only implementations of properties", "data, **kwargs) class EventCommon(object): \"\"\" Methods and properties common to", "self.get_tags(): if t == key: return v return None @property", "data stored in postgres. \"\"\" __core__ = False group_id =", "(alex) We need a better way to cache these properties.", "snuba to bootstrap an # event. If the client is", "return # it, otherwise we defer to EventCommon implementation. 
def", "= models.CharField(max_length=32, null=True, db_column=\"message_id\") project_id = BoundedBigIntegerField(blank=True, null=True) message =", "self.snuba_data: return self.snuba_data[\"ip_address\"] return super(SnubaEvent, self).ip_address @property def title(self): if", "x.project_id or x.project.id, ref_version=2, wrapper=EventDict, ) objects = EventManager() class", "to fetch this on demand # again. In particular if", "these properties. functools32 # doesn't quite do the trick as", "this # will become `message`. return ( get_path(self.data, \"logentry\", \"formatted\")", "= self.real_message data[\"datetime\"] = self.datetime data[\"time_spent\"] = self.time_spent data[\"tags\"] =", "to pickle interfaces we would # pickle a CanonicalKeyView which", "return self.data.get(\"type\", \"default\") def get_event_metadata(self): \"\"\" Return the metadata of", "name)) if value is None: raise KeyError return six.text_type(value) elif", "EventSubjectTemplateData(object): tag_aliases = {\"release\": \"sentry:release\", \"dist\": \"sentry:dist\", \"user\": \"sentry:user\"} def", "unique. The event body should be saved under this key", "means that after calling that function the event data has", "inserts this for snuba et = eventtypes.get(self.get_event_type())() return et.get_location(self.get_event_metadata()) @property", "event object (or loading it from the DB) will ensure", "ensure the data fits the type schema. 
\"\"\" def __init__(self,", "initialize the event with `SnubaEvent.minimal_colums` and let the rest of", "super(SnubaEvent, self).tags def get_minimal_user(self): from sentry.interfaces.user import User return User.to_python(", "if not hasattr(self, \"_group_cache\"): self._group_cache = Group.objects.get(id=self.group_id) return self._group_cache @group.setter", "and let the rest of of the attributes come from", "self.get_tag('level') or self.group.get_level_display() if self.group: return self.group.get_level_display() else: return None", "@property def location(self): if \"location\" in self.snuba_data: return self.snuba_data[\"location\"] return", "compatibility, still used by plugins. def get_tags(self): return self.tags def", "= dict(stored_config) config[\"id\"] = force_config else: config = force_config #", "is completely empty. In that case we want to hobble", "from dateutil.parser import parse as parse_date from django.db import models", "group. if data.get(\"culprit\") is None and self.group_id: data[\"culprit\"] = self.group.culprit", "Event.tags instead.\", DeprecationWarning) return self.get_tag(\"site\") @property def server_name(self): warnings.warn( \"Event.server_name", "@property def message(self): if \"message\" in self.snuba_data: return self.snuba_data[\"message\"] return", "= sane_repr(\"project_id\", \"group_id\") def __getstate__(self): state = Model.__getstate__(self) # do", "128).encode( \"utf-8\" ) def get_environment(self): from sentry.models import Environment if", "bar)) return [] # For compatibility, still used by plugins.", "still # being used by plugin code and once the", "============================================ # DEPRECATED # ============================================ @property def level(self): # we", "class EventDict(CanonicalKeyDict): \"\"\" Creating an instance of this dictionary will", "Return the metadata of this event. See ``sentry.eventtypes``. \"\"\" #", "a different config. 
if force_config is None: hashes = self.data.get(\"hashes\")", "event body. This might be useful for implementing tag deletions", "return et.get_location(self.get_event_metadata()) @property def real_message(self): # XXX(mitsuhiko): this is a", "sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config from sentry.stacktraces.processing import normalize_stacktraces_for_grouping # Forcing", "<reponame>Ali-Tahir/sentry from __future__ import absolute_import import six import string import", "code once it's unused. It's still # being used by", "class EventSubjectTemplateData(object): tag_aliases = {\"release\": \"sentry:release\", \"dist\": \"sentry:dist\", \"user\": \"sentry:user\"}", "do not force a different config. if force_config is None:", "for a potential JSON serializer data = OrderedDict() data[\"event_id\"] =", "Snuba implementations of the django fields on Event # ====================================================", "the culprit if self.group_id: return self.data.get(\"culprit\") or self.group.culprit return self.data.get(\"culprit\")", "user badge. \"\"\" return self.get_interface(\"user\") def as_dict(self): \"\"\"Returns the data", "self.snuba_data = snuba_values # self.data is a (lazy) dict of", "# TODO: This *might* need to be protected from an", "import get_grouping_variants_for_event, load_grouping_config from sentry.stacktraces.processing import normalize_stacktraces_for_grouping # Forcing configs", "Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"logger\") @property def site(self): warnings.warn(\"Event.site", "self.event.organization.slug elif name == \"title\": return self.event.title raise KeyError DEFAULT_SUBJECT_TEMPLATE", "by plugin code and once the message rename is through", "def location(self): if \"location\" in self.snuba_data: return self.snuba_data[\"location\"] return super(SnubaEvent,", "@property def site(self): warnings.warn(\"Event.site is deprecated. 
Use Event.tags instead.\", DeprecationWarning)", "and this # will become `message`. return ( get_path(self.data, \"logentry\",", "= r\"(tag:)?[_a-z][_a-z0-9]*\" class EventSubjectTemplateData(object): tag_aliases = {\"release\": \"sentry:release\", \"dist\": \"sentry:dist\",", "@property def ip_address(self): if \"ip_address\" in self.snuba_data: return self.snuba_data[\"ip_address\"] return", "`data` dict (which would force a nodestore load). All unresolved", "A list of all useful columns we can get from", "different config. if force_config is None: hashes = self.data.get(\"hashes\") if", "this is None the `get_grouping_variants_for_event` will fill in # the", "return [] # For compatibility, still used by plugins. def", "self.release data[\"dist\"] = self.dist data[\"platform\"] = self.platform data[\"message\"] = self.real_message", "be saved under this key in nodestore so it can", "fits the type schema. \"\"\" def __init__(self, data, skip_renormalization=False, **kwargs):", "id anyway. return self.event_id def save(self): raise NotImplementedError class Event(EventCommon,", "self.group.culprit return self.data.get(\"culprit\") @property def location(self): # also see event_manager.py", "having to rewrite nodestore blobs. \"\"\" if \"tags.key\" in self.snuba_data", "def group(self): from sentry.models import Group if not self.group_id: return", "def error(self): # TODO why is this not a property?", "NodeData(None, node_id, data=None, wrapper=EventDict) def __getattr__(self, name): \"\"\" Depending on", "StoreNormalizer(is_renormalize=True, enable_trimming=False) data = normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self, data, **kwargs) class EventCommon(object):", "\"title\", \"type\", # Required to provide snuba-only tags \"tags.key\", \"tags.value\",", "is deprecated. Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"logger\") @property def", "An event backed by data stored in snuba. 
This is", "be retrieved using the same generated id when we only", "= self.title data[\"location\"] = self.location return data # ============================================ #", "for x in self.get_grouping_variants(force_config).values()] ) def get_grouping_variants(self, force_config=None, normalize_stacktraces=False): \"\"\"", "the client is planning on loading the entire event body", "= models.DateTimeField(default=timezone.now, db_index=True) time_spent = BoundedIntegerField(null=True) data = NodeField( blank=True,", "and values and len(keys) == len(values): return sorted(zip(keys, values)) else:", "get_grouping_config_dict_for_event_data return get_grouping_config_dict_for_event_data(self.data, self.project) def get_hashes(self, force_config=None): \"\"\" Returns the", "filter( None, [x.get_hash() for x in self.get_grouping_variants(force_config).values()] ) def get_grouping_variants(self,", "@property def checksum(self): warnings.warn(\"Event.checksum is no longer used\", DeprecationWarning) return", "class Event(EventCommon, Model): \"\"\" An event backed by data stored", "quite do the trick as there is a reference bug", "and self.event.group_id: return self.event.group.qualified_short_id elif name == \"orgID\": return self.event.organization.slug", "normalize_stacktraces_for_grouping(self.data, config) return get_grouping_variants_for_event(self, config) def get_primary_hash(self): # TODO: This", "what snuba data this event was initialized with, we may", "__getitem__(self, name): if name.startswith(\"tag:\"): name = name[4:] value = self.event.get_tag(self.tag_aliases.get(name,", "data[\"location\"] = self.location return data # ============================================ # DEPRECATED #", "[ \"culprit\", \"location\", \"message\", \"platform\", \"title\", \"type\", # Required to", "# the hex event_id here. We should be moving to", "otherwise we defer to EventCommon implementation. 
def get_event_type(self): if \"type\"", "we know # all timestamps from snuba are UTC. return", "force_config else: config = force_config # Otherwise we just use", "type for `Event.data` such that creating an event object (or", "Methods and properties common to both Event and SnubaEvent. \"\"\"", "# ============================================ @property def level(self): # we might want to", "if normalize_stacktraces: normalize_stacktraces_for_grouping(self.data, config) return get_grouping_variants_for_event(self, config) def get_primary_hash(self): #", "support event creation or save. The basic event data is", "the string # ends with '+00:00', so just replace the", "used\", DeprecationWarning) return \"\" def error(self): # TODO why is", "of columns we need to get from snuba to bootstrap", "in self.get_tags(): if t == key: return v return None", "return self.snuba_data[\"culprit\"] return super(SnubaEvent, self).culprit @property def location(self): if \"location\"", "data data[\"title\"] = self.title data[\"location\"] = self.location return data #", "Forcing configs has two separate modes. One is where just", "instead.\", DeprecationWarning ) return self.get_tag(\"server_name\") @property def checksum(self): warnings.warn(\"Event.checksum is", "Event.title\", DeprecationWarning) return self.title error.short_description = _(\"error\") @property def message_short(self):", "in self.snuba_data: return self.snuba_data[\"ip_address\"] return super(SnubaEvent, self).ip_address @property def title(self):", "# If we have hashes stored in the data we", "to return, or we may have to look in the", "db_table = \"sentry_message\" verbose_name = _(\"message\") verbose_name_plural = _(\"messages\") unique_together", "event. See ``sentry.eventtypes``. \"\"\" # For some inexplicable reason we", "a (lazy) dict of everything we got from nodestore node_id", "will come through here. \"\"\" if name in (\"_project_cache\", \"_group_cache\",", "would otherwise require nodestore data. 
# ============================================ @property def tags(self):", "(lazy) dict of everything we got from nodestore node_id =", "# return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level if self.group: return self.group.level else:", "if data.get(\"culprit\") is None and self.group_id: data[\"culprit\"] = self.group.culprit #", "body should be saved under this key in nodestore so", "rest of of the attributes come from nodestore. \"\"\" assert", "snuba. This is a readonly event and does not support", "stored or # default config dictionary if force_config is not", "as a regular Event. \"\"\" # The minimal list of", "under this key in nodestore so it can be retrieved", "organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")), ) return self._environment_cache def get_minimal_user(self): \"\"\" A minimal", "is not None: return hashes return filter( None, [x.get_hash() for", "fields on Event # ==================================================== @property def datetime(self): \"\"\" Reconstruct", "\"user_id\", \"username\", ] __repr__ = sane_repr(\"project_id\", \"group_id\") def __init__(self, snuba_values):", "checksum(self): warnings.warn(\"Event.checksum is no longer used\", DeprecationWarning) return \"\" def", "memoize from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView from sentry.utils.safe import get_path", "attributes you might need to access on it. If you", "If you only need a few properties, and they are", "= isinstance(data, EventDict) or ( isinstance(data, NodeData) and isinstance(data.data, EventDict)", "if self.group: return self.group.get_level_display() else: return None # deprecated accessors", "you may as well just initialize the event with `SnubaEvent.minimal_colums`", "both Event and SnubaEvent. \"\"\" @classmethod def generate_node_id(cls, project_id, event_id):", "to access on it. 
If you only need a few", "Model, NodeData, NodeField, sane_repr, ) from sentry.db.models.manager import EventManager from", "def transaction(self): return self.get_tag(\"transaction\") def get_email_subject(self): template = self.project.get_option(\"mail:subject_template\") if", "and event_id. \"\"\" return md5(\"{}:{}\".format(project_id, event_id)).hexdigest() # TODO (alex) We", "return self.get_tag('level') or self.group.get_level_display() if self.group: return self.group.get_level_display() else: return", "return state class EventSubjectTemplate(string.Template): idpattern = r\"(tag:)?[_a-z][_a-z0-9]*\" class EventSubjectTemplateData(object): tag_aliases", "# Forcing configs has two separate modes. One is where", "self).title @property def culprit(self): if \"culprit\" in self.snuba_data: return self.snuba_data[\"culprit\"]", "which old sentry workers do not know # about state.pop(\"_project_cache\",", "also clunky because these # properties need to be stripped", "except ValueError: # at one point Sentry allowed invalid tag", "data[\"tags\"] = [(k.split(\"sentry:\", 1)[-1], v) for (k, v) in self.tags]", "remote_addr = get_path(self.data, \"request\", \"env\", \"REMOTE_ADDR\") if remote_addr: return remote_addr", "state = Model.__getstate__(self) # do not pickle cached info. We", "become `message`. return ( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\",", "columns we need to get from snuba to bootstrap an", "location with dynamically generated data data[\"title\"] = self.title data[\"location\"] =", "# TODO why is this not a property? warnings.warn(\"Event.error is", "self.title data[\"location\"] = self.location return data # ============================================ # DEPRECATED", "point Sentry allowed invalid tag sets such as (foo, bar)", "def site(self): warnings.warn(\"Event.site is deprecated. 
Use Event.tags instead.\", DeprecationWarning) return", "# dateutil seems to use tzlocal() instead of UTC even", "where we never # have to reference the row id", "super(SnubaEvent, self).title @property def culprit(self): if \"culprit\" in self.snuba_data: return", "def message(self): if \"message\" in self.snuba_data: return self.snuba_data[\"message\"] return self.data.get(\"message\")", "get_primary_hash(self): # TODO: This *might* need to be protected from", "get_path(self.data, \"user\", \"ip_address\") if ip_address: return ip_address remote_addr = get_path(self.data,", "in __getstate__. @property def group(self): from sentry.models import Group if", "we never # have to reference the row id anyway.", "attribute or # this method could return what currently is", "event body is fetched from nodestore and bound to the", "a nodestore lookup) you may as well just initialize the", "otherwise we # fall back to generating new ones from", "data[k] = v # for a long time culprit was", "{v_k: v_v for v_k, v_v in six.iteritems(v) if v_k !=", "inserts this for snuba et = eventtypes.get(self.get_event_type())() return et.get_title(self.get_event_metadata()) @property", "'+00:00', so just replace the TZ with UTC because we", "\"sentry_message\" verbose_name = _(\"message\") verbose_name_plural = _(\"messages\") unique_together = ((\"project_id\",", "event_id): \"\"\" Returns a deterministic node_id for this event based", "is a (lazy) dict of everything we got from nodestore", "DeprecationWarning ) return self.get_tag(\"server_name\") @property def checksum(self): warnings.warn(\"Event.checksum is no", "self.snuba_data[\"title\"] return super(SnubaEvent, self).title @property def culprit(self): if \"culprit\" in", "ValueError: # at one point Sentry allowed invalid tag sets", "not None and v is not None ] ) return", "just the # config ID is given in which case", "In particular if we were to pickle interfaces we would", "case we want to hobble along # further. 
return self.data.get(\"metadata\")", "\"user\", \"ip_address\") if ip_address: return ip_address remote_addr = get_path(self.data, \"request\",", "the internal raw event data dict.\"\"\" return dict(self.data.items()) @property def", "None and self.group_id: data[\"culprit\"] = self.group.culprit # Override title and", "a OrderedDict to keep elements ordered for a potential JSON", "_group_cache thing is also clunky because these # properties need", "@property def platform(self): if \"platform\" in self.snuba_data: return self.snuba_data[\"platform\"] return", "client is planning on loading the entire event body from", "return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode( \"utf-8\" ) def get_environment(self): from sentry.models import", "getattr(self, \"tags.value\") if keys and values and len(keys) == len(values):", "\"\"\" An event backed by data stored in postgres. \"\"\"", "import ugettext_lazy as _ from hashlib import md5 from semaphore.processing", "which case it's merged with the stored or # default", "with `SnubaEvent.minimal_colums` and let the rest of of the attributes", "may have the data available to return, or we may", "six.string_types): stored_config = self.get_grouping_config() config = dict(stored_config) config[\"id\"] = force_config", "is None the `get_grouping_variants_for_event` will fill in # the default.", "to bootstrap an # event. If the client is planning", "force_config # Otherwise we just use the same grouping config", "@classmethod def generate_node_id(cls, project_id, event_id): \"\"\" Returns a deterministic node_id", "return remote_addr return None @property def tags(self): try: rv =", "you might need to access on it. 
If you only", "= SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"] ) self.data = NodeData(None, node_id, data=None,", "name = name[4:] value = self.event.get_tag(self.tag_aliases.get(name, name)) if value is", "warnings.warn( \"Event.server_name is deprecated. Use Event.tags instead.\", DeprecationWarning ) return", "implementation. def get_event_type(self): if \"type\" in self.snuba_data: return self.snuba_data[\"type\"] return", "we use them, otherwise we # fall back to generating", "project.id self._project_cache = project def get_interfaces(self): return CanonicalKeyView(get_interfaces(self.data)) @memoize def", "\"\"\" def __init__(self, data, skip_renormalization=False, **kwargs): is_renormalized = isinstance(data, EventDict)", "rename is through # plugins should instead swithc to the", ") def get_environment(self): from sentry.models import Environment if not hasattr(self,", "place. \"\"\" from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config from sentry.stacktraces.processing import", "and the event body is fetched from nodestore and bound", "(or a subset depending on your needs) But if you", "similar to `get_hashes` but will instead return the grouping components", "is None and self.group_id: data[\"culprit\"] = self.group.culprit # Override title", "readonly event and does not support event creation or save.", "SnubaEvent, think about the attributes you might need to access", "without having to rewrite nodestore blobs. \"\"\" if \"tags.key\" in", "data stored in snuba. This is a readonly event and", "None def get_level_display(self): # we might want to move to", "# would otherwise require nodestore data. 
# ============================================ @property def", "= self.project_id data[\"release\"] = self.release data[\"dist\"] = self.dist data[\"platform\"] =", "it was already normalized on the way in and we", "self._project_cache = project def get_interfaces(self): return CanonicalKeyView(get_interfaces(self.data)) @memoize def interfaces(self):", "save the culprit if self.group_id: return self.data.get(\"culprit\") or self.group.culprit return", "data in normalized form for external consumers.\"\"\" # We use", "snuba, then you should use `SnubaEvent.selected_colums` (or a subset depending", "and len(keys) == len(values): return sorted(zip(keys, values)) else: return []", "the data we use them, otherwise we # fall back", "v in sorted(six.iteritems(self.data)): if k in data: continue if k", "type of this event. See ``sentry.eventtypes``. \"\"\" return self.data.get(\"type\", \"default\")", "event_id here. We should be moving to a world where", "data = OrderedDict() data[\"event_id\"] = self.event_id data[\"project\"] = self.project_id data[\"release\"]", "properties that # would otherwise require nodestore data. # ============================================", "KeyError return six.text_type(value) elif name == \"project\": return self.event.project.get_full_name() elif", "data for these is available from snuba, we assume #", "event body from # nodestore anyway, we may as well", "calculated hashes for the event. This uses the stored information", "\"_group_cache\"): self._group_cache = Group.objects.get(id=self.group_id) return self._group_cache @group.setter def group(self, group):", "event creation or save. The basic event data is fetched", "self.snuba_data: return self.snuba_data[name] else: return self.data[name] # ============================================ # Snuba-only", "project_id and event_id which together are globally unique. 
The event", "= BoundedBigIntegerField(blank=True, null=True) message = models.TextField() platform = models.CharField(max_length=64, null=True)", "return md5(\"{}:{}\".format(project_id, event_id)).hexdigest() # TODO (alex) We need a better", "def location(self): # also see event_manager.py which inserts this for", "in from the group. if data.get(\"culprit\") is None and self.group_id:", "list of columns we need to get from snuba to", "might need to access on it. If you only need", "for the event. This uses the stored information if available.", "\"ip_address\" in self.snuba_data: return self.snuba_data[\"ip_address\"] return super(SnubaEvent, self).ip_address @property def", "you should use `SnubaEvent.selected_colums` (or a subset depending on your", "elif name == \"shortID\" and self.event.group_id: return self.event.group.qualified_short_id elif name", "the event body is fetched from nodestore and bound to", "return super(SnubaEvent, self).culprit @property def location(self): if \"location\" in self.snuba_data:", "Depending on what snuba data this event was initialized with,", "hashes is not None: return hashes return filter( None, [x.get_hash()", "# ============================================ @property def tags(self): \"\"\" Override of tags property", "super(SnubaEvent, self).culprit @property def location(self): if \"location\" in self.snuba_data: return", "Otherwise we just use the same grouping config as stored.", "given in which case it's merged with the stored or", "def get_event_metadata(self): \"\"\" Return the metadata of this event. 
See", "them, otherwise we # fall back to generating new ones", "def id(self): # Because a snuba event will never have", "super(SnubaEvent, self).get_event_type() @property def ip_address(self): if \"ip_address\" in self.snuba_data: return", "self.event.project.slug elif name == \"shortID\" and self.event.group_id: return self.event.group.qualified_short_id elif", "`normalize_stacktraces` is set to `True` then the event data will", "In that case we want to hobble along # further.", "snuba_values for k in SnubaEvent.minimal_columns) # self.snuba_data is a dict", "got from nodestore node_id = SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"] ) self.data", "# doesn't quite do the trick as there is a", "the event through basic (Rust-based) type/schema validation called \"re-normalization\". This", "@project.setter def project(self, project): if project is None: self.project_id =", "to generating new ones from the data. We can only", "all the stuff we got from snuba self.snuba_data = snuba_values", "self.event.project.get_full_name() elif name == \"projectID\": return self.event.project.slug elif name ==", "Event.tags instead.\", DeprecationWarning) return self.get_tag(\"logger\") @property def site(self): warnings.warn(\"Event.site is", "used by plugin code and once the message rename is", "\"username\", ] __repr__ = sane_repr(\"project_id\", \"group_id\") def __init__(self, snuba_values): \"\"\"", "if \"location\" in self.snuba_data: return self.snuba_data[\"location\"] return super(SnubaEvent, self).location #", "= self.event.get_tag(self.tag_aliases.get(name, name)) if value is None: raise KeyError return", "django.utils import timezone from django.utils.translation import ugettext_lazy as _ from", "need to get from snuba to bootstrap an # event.", "with '+00:00', so just replace the TZ with UTC because", "data # ============================================ # DEPRECATED # ============================================ @property 
def level(self):", "then you should use `SnubaEvent.selected_colums` (or a subset depending on", "See ``sentry.eventtypes``. \"\"\" # For some inexplicable reason we have", "return self._environment_cache def get_minimal_user(self): \"\"\" A minimal 'User' interface object", "from snuba self.snuba_data = snuba_values # self.data is a (lazy)", "name in (\"_project_cache\", \"_group_cache\", \"_environment_cache\"): raise AttributeError() if name in", "needs) But if you know you are going to need", "a user badge. \"\"\" return self.get_interface(\"user\") def as_dict(self): \"\"\"Returns the", "self.data.get(\"version\", \"5\") @property def ip_address(self): ip_address = get_path(self.data, \"user\", \"ip_address\")", "the calculated hashes for the event. This uses the stored", "# ============================================ # Snuba-only implementations of properties that # would", "self.snuba_data: return self.snuba_data[\"message\"] return self.data.get(\"message\") @property def platform(self): if \"platform\"", "def message_short(self): warnings.warn(\"Event.message_short is deprecated, use Event.title\", DeprecationWarning) return self.title", "and event_id which together are globally unique. The event body", "= load_grouping_config(config) if normalize_stacktraces: normalize_stacktraces_for_grouping(self.data, config) return get_grouping_variants_for_event(self, config) def", ") def get_grouping_variants(self, force_config=None, normalize_stacktraces=False): \"\"\" This is similar to", "**kwargs) class EventCommon(object): \"\"\" Methods and properties common to both", "import normalize_stacktraces_for_grouping # Forcing configs has two separate modes. 
One", "the entire event body anyway (which requires a nodestore lookup)", "# A list of all useful columns we can get", "name): return self.interfaces.get(name) def get_legacy_message(self): # TODO(mitsuhiko): remove this code", "# also see event_manager.py which inserts this for snuba et", "we might want to move to this: # return LOG_LEVELS_MAP.get(self.get_level_display())", "in self.snuba_data: keys = getattr(self, \"tags.key\") values = getattr(self, \"tags.value\")", "def dist(self): return self.get_tag(\"sentry:dist\") def get_raw_data(self): \"\"\"Returns the internal raw", "sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView from sentry.utils.safe import get_path from sentry.utils.strings", "is through # plugins should instead swithc to the actual", "generated id when we only have project_id and event_id. \"\"\"", "separate modes. One is where just the # config ID", "import Project if not hasattr(self, \"_project_cache\"): self._project_cache = Project.objects.get(id=self.project_id) return", "return CanonicalKeyView(get_interfaces(self.data)) @memoize def interfaces(self): return self.get_interfaces() def get_interface(self, name):", "event. See ``sentry.eventtypes``. \"\"\" return self.data.get(\"type\", \"default\") def get_event_metadata(self): \"\"\"", "tag sets such as (foo, bar) # vs ((tag, foo),", "UTC. return parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property def time_spent(self): return None @property def", "t, v in get_path(self.data, \"tags\", filter=True) or () if t", "we need to get from snuba to bootstrap an #", "NotImplementedError class Event(EventCommon, Model): \"\"\" An event backed by data", "= models.CharField(max_length=64, null=True) datetime = models.DateTimeField(default=timezone.now, db_index=True) time_spent = BoundedIntegerField(null=True)", "self.foo type accesses will come through here. 
\"\"\" if name", "Returns a deterministic node_id for this event based on the", "we were to pickle interfaces we would # pickle a", "We need a better way to cache these properties. functools32", "way to cache these properties. functools32 # doesn't quite do", "isinstance(data, EventDict) or ( isinstance(data, NodeData) and isinstance(data.data, EventDict) )", "# the culprit in from the group. if data.get(\"culprit\") is", "not hasattr(self, \"_group_cache\"): self._group_cache = Group.objects.get(id=self.group_id) return self._group_cache @group.setter def", "= NodeField( blank=True, null=True, ref_func=lambda x: x.project_id or x.project.id, ref_version=2,", "which together are globally unique. The event body should be", "self.email, \"username\": self.username, \"ip_address\": self.ip_address, } ) # If the", "\"_project_cache\"): self._project_cache = Project.objects.get(id=self.project_id) return self._project_cache @project.setter def project(self, project):", "StoreNormalizer from sentry import eventtypes from sentry.db.models import ( BoundedBigIntegerField,", "the datetime of this event from the snuba timestamp \"\"\"", "same generated id when we only have project_id and event_id.", "code and once the message rename is through # plugins", "self.data.get(\"hashes\") if hashes is not None: return hashes return filter(", "None: self.project_id = None else: self.project_id = project.id self._project_cache =", "we defer to EventCommon implementation. def get_event_type(self): if \"type\" in", "and \"tags.value\" in self.snuba_data: keys = getattr(self, \"tags.key\") values =", "longer used\", DeprecationWarning) return \"\" def error(self): # TODO why", "*might* need to be protected from an IndexError? return self.get_hashes()[0]", "might want to move to this: # return self.get_tag('level') or", "all useful columns we can get from snuba. 
selected_columns =", "@property def project(self): from sentry.models import Project if not hasattr(self,", "name == \"orgID\": return self.event.organization.slug elif name == \"title\": return", "on what snuba data this event was initialized with, we", "keep elements ordered for a potential JSON serializer data =", "as_dict(self): \"\"\"Returns the data in normalized form for external consumers.\"\"\"", "the `get_grouping_variants_for_event` will fill in # the default. else: config", "entire event body from # nodestore anyway, we may as", "regular Event. \"\"\" # The minimal list of columns we", "\"\"\" Return the metadata of this event. See ``sentry.eventtypes``. \"\"\"", "v_v for v_k, v_v in six.iteritems(v) if v_k != \"client_ip\"}", "the django fields on Event # ==================================================== @property def datetime(self):", "site(self): warnings.warn(\"Event.site is deprecated. Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"site\")", "def culprit(self): if \"culprit\" in self.snuba_data: return self.snuba_data[\"culprit\"] return super(SnubaEvent,", "import Group if not self.group_id: return None if not hasattr(self,", "an event object (or loading it from the DB) will", "all available in snuba, then you should use `SnubaEvent.selected_colums` (or", "return self.event.project.get_full_name() elif name == \"projectID\": return self.event.project.slug elif name", "should use `SnubaEvent.selected_colums` (or a subset depending on your needs)", "def get_legacy_message(self): # TODO(mitsuhiko): remove this code once it's unused.", "on loading the entire event body from # nodestore anyway,", "body anyway (which requires a nodestore lookup) you may as", "The basic event data is fetched from snuba, and the", "in the `data` dict (which would force a nodestore load).", "lookup) you may as well just initialize the event with", "truncatechars class EventDict(CanonicalKeyDict): \"\"\" Creating an instance of this dictionary", 
"_(\"message\") verbose_name_plural = _(\"messages\") unique_together = ((\"project_id\", \"event_id\"),) index_together =", "message_short(self): warnings.warn(\"Event.message_short is deprecated, use Event.title\", DeprecationWarning) return self.title class", "# will become `message`. return ( get_path(self.data, \"logentry\", \"formatted\") or", "gives us enough information to render a user badge. \"\"\"", "self.snuba_data and \"tags.value\" in self.snuba_data: keys = getattr(self, \"tags.key\") values", "'user' interface \"email\", \"ip_address\", \"user_id\", \"username\", ] __repr__ = sane_repr(\"project_id\",", "rv except ValueError: # at one point Sentry allowed invalid", "event and does not support event creation or save. The", "One is where just the # config ID is given", "event variants being created. This means that after calling that", "\"timestamp\"] # A list of all useful columns we can", "self.project_id = None else: self.project_id = project.id self._project_cache = project", "to this: # return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level if self.group: return", "if \"platform\" in self.snuba_data: return self.snuba_data[\"platform\"] return self.data.get(\"platform\") @property def", "is a transitional attribute that should be # removed. `message`", "dict.\"\"\" return dict(self.data.items()) @property def size(self): return len(json.dumps(dict(self.data))) @property def", "\"sentry:release\", \"dist\": \"sentry:dist\", \"user\": \"sentry:user\"} def __init__(self, event): self.event =", "def get_event_type(self): \"\"\" Return the type of this event. See", "tags from snuba rather than the nodestore event body. This", "rewrite nodestore blobs. 
\"\"\" if \"tags.key\" in self.snuba_data and \"tags.value\"", "null=True) message = models.TextField() platform = models.CharField(max_length=64, null=True) datetime =", "provide snuba-only tags \"tags.key\", \"tags.value\", # Required to provide snuba-only", "hashlib import md5 from semaphore.processing import StoreNormalizer from sentry import", "elif name == \"project\": return self.event.project.get_full_name() elif name == \"projectID\":", "timestamps from snuba are UTC. return parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property def time_spent(self):", "``sentry.eventtypes``. \"\"\" return self.data.get(\"type\", \"default\") def get_event_metadata(self): \"\"\" Return the", "Event.title\", DeprecationWarning) return self.title class SnubaEvent(EventCommon): \"\"\" An event backed", "we # fall back to generating new ones from the", "values and len(keys) == len(values): return sorted(zip(keys, values)) else: return", "@property def tags(self): \"\"\" Override of tags property that uses", "{\"release\": \"sentry:release\", \"dist\": \"sentry:dist\", \"user\": \"sentry:user\"} def __init__(self, event): self.event", "look in the `data` dict (which would force a nodestore", "want to move to this: # return self.get_tag('level') or self.group.get_level_display()", "return self.data.get(\"culprit\") @property def location(self): # also see event_manager.py which", "is a readonly event and does not support event creation", "else: template = DEFAULT_SUBJECT_TEMPLATE return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode( \"utf-8\" ) def", "it, otherwise we defer to EventCommon implementation. 
def get_event_type(self): if", "will instead return the grouping components for each variant in", "name): if name.startswith(\"tag:\"): name = name[4:] value = self.event.get_tag(self.tag_aliases.get(name, name))", "raise KeyError return six.text_type(value) elif name == \"project\": return self.event.project.get_full_name()", "If the client is planning on loading the entire event", "== \"orgID\": return self.event.organization.slug elif name == \"title\": return self.event.title", "here. We should be moving to a world where we", "is None: hashes = self.data.get(\"hashes\") if hashes is not None:", "config. if force_config is None: hashes = self.data.get(\"hashes\") if hashes", "event data is fetched from snuba, and the event body", "eventtypes from sentry.db.models import ( BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField,", "@property def location(self): # also see event_manager.py which inserts this", "calling that function the event data has been modified in", "parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property def time_spent(self): return None @property def message(self): if", "be modified for `in_app` in addition to event variants being", "if name in (\"_project_cache\", \"_group_cache\", \"_environment_cache\"): raise AttributeError() if name", "@memoize def interfaces(self): return self.get_interfaces() def get_interface(self, name): return self.interfaces.get(name)", "sentry.utils.safe import get_path from sentry.utils.strings import truncatechars class EventDict(CanonicalKeyDict): \"\"\"", "if we do not force a different config. 
if force_config", ") @property def organization(self): return self.project.organization @property def version(self): return", "\"email\": self.email, \"username\": self.username, \"ip_address\": self.ip_address, } ) # If", "@property def title(self): # also see event_manager.py which inserts this", "only need a few properties, and they are all available", "datetime(self): \"\"\" Reconstruct the datetime of this event from the", "= StoreNormalizer(is_renormalize=True, enable_trimming=False) data = normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self, data, **kwargs) class", "property that uses tags from snuba rather than the nodestore", "into account fingerprinting and checksums. \"\"\" # If we have", "\"\"\"Returns the internal raw event data dict.\"\"\" return dict(self.data.items()) @property", "and isinstance(data.data, EventDict) ) if not skip_renormalization and not is_renormalized:", "get_level_display(self): # we might want to move to this: #", "dict(self.data.items()) @property def size(self): return len(json.dumps(dict(self.data))) @property def transaction(self): return", "self.event.group.qualified_short_id elif name == \"orgID\": return self.event.organization.slug elif name ==", "interface object that gives us enough information to render a", "Override of tags property that uses tags from snuba rather", "self.snuba_data[\"platform\"] return self.data.get(\"platform\") @property def id(self): # Because a snuba", "event data will be modified for `in_app` in addition to", "self.message ) def get_event_type(self): \"\"\" Return the type of this", "self.get_tag(\"sentry:release\") @property def dist(self): return self.get_tag(\"sentry:dist\") def get_raw_data(self): \"\"\"Returns the", "out in __getstate__. @property def group(self): from sentry.models import Group", "where the data # is completely empty. In that case", "hashes = self.data.get(\"hashes\") if hashes is not None: return hashes", "the same way as a regular Event. 
\"\"\" # The", "the current _group_cache thing is also clunky because these #", "self.data[name] # ============================================ # Snuba-only implementations of properties that #", "return self.project.organization @property def version(self): return self.data.get(\"version\", \"5\") @property def", "\"\"\" Creating an instance of this dictionary will send the", "loading the entire event body from # nodestore anyway, we", "it. If you only need a few properties, and they", "= self.event_id data[\"project\"] = self.project_id data[\"release\"] = self.release data[\"dist\"] =", "\"\"\" # The minimal list of columns we need to", "back to generating new ones from the data. We can", "== \"shortID\" and self.event.group_id: return self.event.group.qualified_short_id elif name == \"orgID\":", "django fields on Event # ==================================================== @property def datetime(self): \"\"\"", "For some inexplicable reason we have some cases where the", "self.data.get(\"grouping_config\") config = load_grouping_config(config) if normalize_stacktraces: normalize_stacktraces_for_grouping(self.data, config) return get_grouping_variants_for_event(self,", "get_hashes(self, force_config=None): \"\"\" Returns the calculated hashes for the event.", "for these is available from snuba, we assume # it", "this not a property? warnings.warn(\"Event.error is deprecated, use Event.title\", DeprecationWarning)", "of the attributes come from nodestore. \"\"\" assert all(k in", "v_k, v_v in six.iteritems(v) if v_k != \"client_ip\"} data[k] =", "some inexplicable reason we have some cases where the data", "warnings.warn(\"Event.site is deprecated. 
Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"site\") @property", "from sentry.interfaces.user import User return User.to_python( { \"id\": self.user_id, \"email\":", "= False group_id = BoundedBigIntegerField(blank=True, null=True) event_id = models.CharField(max_length=32, null=True,", "dateutil.parser import parse as parse_date from django.db import models from", "external consumers.\"\"\" # We use a OrderedDict to keep elements", "\"\"\" When initializing a SnubaEvent, think about the attributes you", "\"location\" in self.snuba_data: return self.snuba_data[\"location\"] return super(SnubaEvent, self).location # ====================================================", "models.CharField(max_length=32, null=True, db_column=\"message_id\") project_id = BoundedBigIntegerField(blank=True, null=True) message = models.TextField()", "a better way to cache these properties. functools32 # doesn't", "is deprecated, use Event.title\", DeprecationWarning) return self.title error.short_description = _(\"error\")", "while events did not save the culprit if self.group_id: return", "\"ip_address\", \"user_id\", \"username\", ] __repr__ = sane_repr(\"project_id\", \"group_id\") def __init__(self,", "cache these properties. functools32 # doesn't quite do the trick", "@property def logger(self): warnings.warn(\"Event.logger is deprecated. Use Event.tags instead.\", DeprecationWarning)", "all(k in snuba_values for k in SnubaEvent.minimal_columns) # self.snuba_data is", "real_message(self): # XXX(mitsuhiko): this is a transitional attribute that should", "with unsaved # models. But the current _group_cache thing is", "might want to move to this: # return LOG_LEVELS_MAP.get(self.get_level_display()) or", "require nodestore data. 
# ============================================ @property def tags(self): \"\"\" Override", "from nodestore node_id = SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"] ) self.data =", "stored in the data we use them, otherwise we #", "nodestore so it can be retrieved using the same generated", "{ \"id\": self.user_id, \"email\": self.email, \"username\": self.username, \"ip_address\": self.ip_address, }", "is real_message. return ( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\",", "empty. In that case we want to hobble along #", "def get_hashes(self, force_config=None): \"\"\" Returns the calculated hashes for the", "isinstance(force_config, six.string_types): stored_config = self.get_grouping_config() config = dict(stored_config) config[\"id\"] =", "(which would force a nodestore load). All unresolved self.foo type", "to reference the row id anyway. return self.event_id def save(self):", "and once the message rename is through # plugins should", "transaction(self): return self.get_tag(\"transaction\") def get_email_subject(self): template = self.project.get_option(\"mail:subject_template\") if template:", "pickle interfaces we would # pickle a CanonicalKeyView which old", "\"id\": self.user_id, \"email\": self.email, \"username\": self.username, \"ip_address\": self.ip_address, } )", "event_id. \"\"\" return md5(\"{}:{}\".format(project_id, event_id)).hexdigest() # TODO (alex) We need", "# Required to provide snuba-only 'user' interface \"email\", \"ip_address\", \"user_id\",", "state.pop(\"_project_cache\", None) state.pop(\"_environment_cache\", None) state.pop(\"_group_cache\", None) state.pop(\"interfaces\", None) return state", "been modified in place. \"\"\" from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config", "fetch this on demand # again. 
In particular if we", "think about the attributes you might need to access on", "get_grouping_variants(self, force_config=None, normalize_stacktraces=False): \"\"\" This is similar to `get_hashes` but", "None) state.pop(\"_group_cache\", None) state.pop(\"interfaces\", None) return state class EventSubjectTemplate(string.Template): idpattern", "if not skip_renormalization and not is_renormalized: normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False)", "= OrderedDict() data[\"event_id\"] = self.event_id data[\"project\"] = self.project_id data[\"release\"] =", "from snuba, and the event body is fetched from nodestore", "elements ordered for a potential JSON serializer data = OrderedDict()", "# deprecated accessors @property def logger(self): warnings.warn(\"Event.logger is deprecated. Use", "location(self): if \"location\" in self.snuba_data: return self.snuba_data[\"location\"] return super(SnubaEvent, self).location", "return self.title class SnubaEvent(EventCommon): \"\"\" An event backed by data", "= ((\"group_id\", \"datetime\"),) __repr__ = sane_repr(\"project_id\", \"group_id\") def __getstate__(self): state", "the snuba timestamp \"\"\" # dateutil seems to use tzlocal()", "None @property def tags(self): try: rv = sorted( [ (t,", "config.\"\"\" from sentry.grouping.api import get_grouping_config_dict_for_event_data return get_grouping_config_dict_for_event_data(self.data, self.project) def get_hashes(self,", "default. else: config = self.data.get(\"grouping_config\") config = load_grouping_config(config) if normalize_stacktraces:", "will take into account fingerprinting and checksums. \"\"\" # If", "null=True) datetime = models.DateTimeField(default=timezone.now, db_index=True) time_spent = BoundedIntegerField(null=True) data =", "data. 
We can only use # this if we do", "depending on your needs) But if you know you are", "self.snuba_data[\"event_id\"] ) self.data = NodeData(None, node_id, data=None, wrapper=EventDict) def __getattr__(self,", "\"title\" in self.snuba_data: return self.snuba_data[\"title\"] return super(SnubaEvent, self).title @property def", "the event. This uses the stored information if available. Grouping", "data[\"dist\"] = self.dist data[\"platform\"] = self.platform data[\"message\"] = self.real_message data[\"datetime\"]", "in self.snuba_data: return self.snuba_data[\"type\"] return super(SnubaEvent, self).get_event_type() @property def ip_address(self):", "get_legacy_message(self): # TODO(mitsuhiko): remove this code once it's unused. It's", "__init__(self, data, skip_renormalization=False, **kwargs): is_renormalized = isinstance(data, EventDict) or (", "the attributes you might need to access on it. If", "Model.__getstate__(self) # do not pickle cached info. We want to", "this event was initialized with, we may have the data", "id(self): # Because a snuba event will never have a", "not pickle cached info. We want to fetch this on", "event_id)).hexdigest() # TODO (alex) We need a better way to", "do not know # about state.pop(\"_project_cache\", None) state.pop(\"_environment_cache\", None) state.pop(\"_group_cache\",", "properties common to both Event and SnubaEvent. \"\"\" @classmethod def", "tags property that uses tags from snuba rather than the", "modified in place. 
\"\"\" from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config from", "or we may have to look in the `data` dict", "because these # properties need to be stripped out in", "is_renormalized: normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False) data = normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self, data,", "return self.title error.short_description = _(\"error\") @property def message_short(self): warnings.warn(\"Event.message_short is", "None @property def release(self): return self.get_tag(\"sentry:release\") @property def dist(self): return", "!= \"client_ip\"} data[k] = v # for a long time", "self.snuba_data[\"ip_address\"] return super(SnubaEvent, self).ip_address @property def title(self): if \"title\" in", "is where just the # config ID is given in", "= normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self, data, **kwargs) class EventCommon(object): \"\"\" Methods and", "= None else: self.project_id = project.id self._project_cache = project def", "dictionary. If `normalize_stacktraces` is set to `True` then the event", "may have to look in the `data` dict (which would", "and checksums. \"\"\" # If we have hashes stored in", "= Environment.objects.get( organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")), ) return self._environment_cache def get_minimal_user(self): \"\"\"", "EventCommon(object): \"\"\" Methods and properties common to both Event and", "have the data available to return, or we may have", "to rewrite nodestore blobs. \"\"\" if \"tags.key\" in self.snuba_data and", "@property def title(self): if \"title\" in self.snuba_data: return self.snuba_data[\"title\"] return", "nodestore load). All unresolved self.foo type accesses will come through", "Sentry allowed invalid tag sets such as (foo, bar) #", "the project_id and event_id which together are globally unique. 
The", "name=Environment.get_name_or_default(self.get_tag(\"environment\")), ) return self._environment_cache def get_minimal_user(self): \"\"\" A minimal 'User'", "in self.get_grouping_variants(force_config).values()] ) def get_grouping_variants(self, force_config=None, normalize_stacktraces=False): \"\"\" This is", "return ( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\") or", "filter=True) or () if t is not None and v", "minimal list of columns we need to get from snuba", "(or loading it from the DB) will ensure the data", "= _(\"messages\") unique_together = ((\"project_id\", \"event_id\"),) index_together = ((\"group_id\", \"datetime\"),)", "rather than the nodestore event body. This might be useful", "is_renormalized = isinstance(data, EventDict) or ( isinstance(data, NodeData) and isinstance(data.data,", "the minimum from snuba to # avoid duplicated work. minimal_columns", "one point Sentry allowed invalid tag sets such as (foo,", "self.project.get_option(\"mail:subject_template\") if template: template = EventSubjectTemplate(template) else: template = DEFAULT_SUBJECT_TEMPLATE", "the event data has been modified in place. 
\"\"\" from", "\"\"\" Override of tags property that uses tags from snuba", "use Event.title\", DeprecationWarning) return self.title error.short_description = _(\"error\") @property def", "@property def datetime(self): \"\"\" Reconstruct the datetime of this event", "we have hashes stored in the data we use them,", ") self.data = NodeData(None, node_id, data=None, wrapper=EventDict) def __getattr__(self, name):", "get_event_type(self): if \"type\" in self.snuba_data: return self.snuba_data[\"type\"] return super(SnubaEvent, self).get_event_type()", "== \"title\": return self.event.title raise KeyError DEFAULT_SUBJECT_TEMPLATE = EventSubjectTemplate(\"$shortID -", "DEFAULT_SUBJECT_TEMPLATE return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode( \"utf-8\" ) def get_environment(self): from sentry.models", "event data has been modified in place. \"\"\" from sentry.grouping.api", "in addition to event variants being created. This means that", "(t, v) for t, v in get_path(self.data, \"tags\", filter=True) or", "to `True` then the event data will be modified for", "is a reference bug with unsaved # models. But the", "CanonicalKeyView(get_interfaces(self.data)) @memoize def interfaces(self): return self.get_interfaces() def get_interface(self, name): return", "def __getattr__(self, name): \"\"\" Depending on what snuba data this", "on it. If you only need a few properties, and", "TODO: This *might* need to be protected from an IndexError?", "in a dictionary. If `normalize_stacktraces` is set to `True` then", "you are going to need the entire event body anyway", "and SnubaEvent. \"\"\" @classmethod def generate_node_id(cls, project_id, event_id): \"\"\" Returns", "a property? warnings.warn(\"Event.error is deprecated, use Event.title\", DeprecationWarning) return self.title", "removed. `message` will be renamed to `search_message` and this #", "cases where the data # is completely empty. 
In that", "or \"\" ) @property def organization(self): return self.project.organization @property def", "@property def version(self): return self.data.get(\"version\", \"5\") @property def ip_address(self): ip_address", "\"\"\" Reconstruct the datetime of this event from the snuba", "from django.utils import timezone from django.utils.translation import ugettext_lazy as _", "EventSubjectTemplate(string.Template): idpattern = r\"(tag:)?[_a-z][_a-z0-9]*\" class EventSubjectTemplateData(object): tag_aliases = {\"release\": \"sentry:release\",", "event was initialized with, we may have the data available", "CanonicalKeyView which old sentry workers do not know # about", "keys = getattr(self, \"tags.key\") values = getattr(self, \"tags.value\") if keys", "sentry.utils.strings import truncatechars class EventDict(CanonicalKeyDict): \"\"\" Creating an instance of", "hashes stored in the data we use them, otherwise we", "self.get_interfaces() def get_interface(self, name): return self.interfaces.get(name) def get_legacy_message(self): # TODO(mitsuhiko):", "event_id = models.CharField(max_length=32, null=True, db_column=\"message_id\") project_id = BoundedBigIntegerField(blank=True, null=True) message", "to the actual message attribute or # this method could", "in the same way as a regular Event. \"\"\" #", "EventSubjectTemplate(template) else: template = DEFAULT_SUBJECT_TEMPLATE return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode( \"utf-8\" )", "self.datetime data[\"time_spent\"] = self.time_spent data[\"tags\"] = [(k.split(\"sentry:\", 1)[-1], v) for", "that should be # removed. 
`message` will be renamed to", "continue if k == \"sdk\": v = {v_k: v_v for", "else: config = self.data.get(\"grouping_config\") config = load_grouping_config(config) if normalize_stacktraces: normalize_stacktraces_for_grouping(self.data,", "deprecated, use Event.title\", DeprecationWarning) return self.title error.short_description = _(\"error\") @property", "parse_date from django.db import models from django.utils import timezone from", "know you are going to need the entire event body", "six import string import warnings import pytz from collections import", "\"logentry\", \"message\") or self.message ) def get_event_type(self): \"\"\" Return the", "None if not hasattr(self, \"_group_cache\"): self._group_cache = Group.objects.get(id=self.group_id) return self._group_cache", "if remote_addr: return remote_addr return None @property def tags(self): try:", "raise NotImplementedError class Event(EventCommon, Model): \"\"\" An event backed by", "we may have the data available to return, or we", "string # ends with '+00:00', so just replace the TZ", "the event grouping config.\"\"\" from sentry.grouping.api import get_grouping_config_dict_for_event_data return get_grouping_config_dict_for_event_data(self.data,", "\"tags.key\", \"tags.value\", # Required to provide snuba-only 'user' interface \"email\",", "self._group_cache = group @property def project(self): from sentry.models import Project", "self.get_interface(\"user\") def as_dict(self): \"\"\"Returns the data in normalized form for", "skip_renormalization=False, **kwargs): is_renormalized = isinstance(data, EventDict) or ( isinstance(data, NodeData)", "that after calling that function the event data has been", "blank=True, null=True, ref_func=lambda x: x.project_id or x.project.id, ref_version=2, wrapper=EventDict, )", ") def get_event_type(self): \"\"\" Return the type of this event.", "k in data: continue if k == \"sdk\": v =", "value is None: raise KeyError return six.text_type(value) elif name ==", "well 
only fetch the minimum from snuba to # avoid", "parse as parse_date from django.db import models from django.utils import", "= Group.objects.get(id=self.group_id) return self._group_cache @group.setter def group(self, group): self.group_id =", "fetch the minimum from snuba to # avoid duplicated work.", "return get_grouping_variants_for_event(self, config) def get_primary_hash(self): # TODO: This *might* need", "\"\" def error(self): # TODO why is this not a", "we just use the same grouping config as stored. if", "A minimal 'User' interface object that gives us enough information", "__getstate__(self): state = Model.__getstate__(self) # do not pickle cached info.", "_ from hashlib import md5 from semaphore.processing import StoreNormalizer from", "unsaved # models. But the current _group_cache thing is also", "For compatibility, still used by plugins. def get_tags(self): return self.tags", "# removed. `message` will be renamed to `search_message` and this", "self.get_tag(\"transaction\") def get_email_subject(self): template = self.project.get_option(\"mail:subject_template\") if template: template =", "we may as well only fetch the minimum from snuba", "v = {v_k: v_v for v_k, v_v in six.iteritems(v) if", "take into account fingerprinting and checksums. \"\"\" # If we", "if # this is None the `get_grouping_variants_for_event` will fill in", "from collections import OrderedDict from dateutil.parser import parse as parse_date", "class EventCommon(object): \"\"\" Methods and properties common to both Event", "self.get_grouping_config() config = dict(stored_config) config[\"id\"] = force_config else: config =", "get_minimal_user(self): from sentry.interfaces.user import User return User.to_python( { \"id\": self.user_id,", "have to reference the row id anyway. return self.event_id def", "deletions without having to rewrite nodestore blobs. 
\"\"\" if \"tags.key\"", "\"client_ip\"} data[k] = v # for a long time culprit", "snuba data this event was initialized with, we may have", "__future__ import absolute_import import six import string import warnings import", "a world where we never # have to reference the", "or self.group.culprit return self.data.get(\"culprit\") @property def location(self): # also see", "minimal_columns + [ \"culprit\", \"location\", \"message\", \"platform\", \"title\", \"type\", #", "project(self): from sentry.models import Project if not hasattr(self, \"_project_cache\"): self._project_cache", "nodestore event body. This might be useful for implementing tag", "Use Event.tags instead.\", DeprecationWarning ) return self.get_tag(\"server_name\") @property def checksum(self):", "# all timestamps from snuba are UTC. return parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property", "be moving to a world where we never # have", "event based on the project_id and event_id which together are", "= BoundedBigIntegerField(blank=True, null=True) event_id = models.CharField(max_length=32, null=True, db_column=\"message_id\") project_id =", "two separate modes. One is where just the # config", "\"tags.key\" in self.snuba_data and \"tags.value\" in self.snuba_data: keys = getattr(self,", "from nodestore and bound to the data property in the", "[x.get_hash() for x in self.get_grouping_variants(force_config).values()] ) def get_grouping_variants(self, force_config=None, normalize_stacktraces=False):", ") return rv except ValueError: # at one point Sentry", "# it, otherwise we defer to EventCommon implementation. def get_event_type(self):", "normalized on the way in and we can just return", "data[\"time_spent\"] = self.time_spent data[\"tags\"] = [(k.split(\"sentry:\", 1)[-1], v) for (k,", "\"\"\" Methods and properties common to both Event and SnubaEvent.", "= [\"event_id\", \"group_id\", \"project_id\", \"timestamp\"] # A list of all", "for each variant in a dictionary. 
If `normalize_stacktraces` is set", "config dictionary if force_config is not None: if isinstance(force_config, six.string_types):", "come from nodestore. \"\"\" assert all(k in snuba_values for k", "data[\"release\"] = self.release data[\"dist\"] = self.dist data[\"platform\"] = self.platform data[\"message\"]", "is also clunky because these # properties need to be", "import OrderedDict from dateutil.parser import parse as parse_date from django.db", "sorted( [ (t, v) for t, v in get_path(self.data, \"tags\",", "then the event data will be modified for `in_app` in", "completely empty. In that case we want to hobble along", "consumers.\"\"\" # We use a OrderedDict to keep elements ordered", "again. In particular if we were to pickle interfaces we", "import CanonicalKeyDict, CanonicalKeyView from sentry.utils.safe import get_path from sentry.utils.strings import", "render a user badge. \"\"\" return self.get_interface(\"user\") def as_dict(self): \"\"\"Returns", "self.user_id, \"email\": self.email, \"username\": self.username, \"ip_address\": self.ip_address, } ) #", "self.event_id data[\"project\"] = self.project_id data[\"release\"] = self.release data[\"dist\"] = self.dist", "\"logentry\", \"message\") or \"\" ) @property def organization(self): return self.project.organization", "object (or loading it from the DB) will ensure the", "Return the type of this event. See ``sentry.eventtypes``. \"\"\" return", "or save. The basic event data is fetched from snuba,", "verbose_name_plural = _(\"messages\") unique_together = ((\"project_id\", \"event_id\"),) index_together = ((\"group_id\",", "# For compatibility, still used by plugins. def get_tags(self): return", "along # further. return self.data.get(\"metadata\") or {} def get_grouping_config(self): \"\"\"Returns", "these is available from snuba, we assume # it was", "`get_hashes` but will instead return the grouping components for each", "it's unused. 
It's still # being used by plugin code", "md5(\"{}:{}\".format(project_id, event_id)).hexdigest() # TODO (alex) We need a better way", "remote_addr return None @property def tags(self): try: rv = sorted(", "\"\"\" return self.data.get(\"type\", \"default\") def get_event_metadata(self): \"\"\" Return the metadata", "by data stored in postgres. \"\"\" __core__ = False group_id", "k == \"sdk\": v = {v_k: v_v for v_k, v_v", "def group(self, group): self.group_id = group.id self._group_cache = group @property", "x.project.id, ref_version=2, wrapper=EventDict, ) objects = EventManager() class Meta: app_label", "get_path from sentry.utils.strings import truncatechars class EventDict(CanonicalKeyDict): \"\"\" Creating an", "import md5 from semaphore.processing import StoreNormalizer from sentry import eventtypes", "import warnings import pytz from collections import OrderedDict from dateutil.parser", "stored information if available. Grouping hashes will take into account", "as well just initialize the event with `SnubaEvent.minimal_colums` and let", "# event. If the client is planning on loading the", "never # have to reference the row id anyway. return", "the stuff we got from snuba self.snuba_data = snuba_values #", "return self.interfaces.get(name) def get_legacy_message(self): # TODO(mitsuhiko): remove this code once", "well just initialize the event with `SnubaEvent.minimal_colums` and let the", "template: template = EventSubjectTemplate(template) else: template = DEFAULT_SUBJECT_TEMPLATE return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)),", "return the grouping components for each variant in a dictionary.", "An event backed by data stored in postgres. 
\"\"\" __core__", "v in get_path(self.data, \"tags\", filter=True) or () if t is", "self._project_cache = Project.objects.get(id=self.project_id) return self._project_cache @project.setter def project(self, project): if", "not skip_renormalization and not is_renormalized: normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False) data", "return None @property def release(self): return self.get_tag(\"sentry:release\") @property def dist(self):", "`message`. return ( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\")", "# If the data for these is available from snuba,", "implementations of properties that # would otherwise require nodestore data.", "reason we have some cases where the data # is", "sentry.interfaces.user import User return User.to_python( { \"id\": self.user_id, \"email\": self.email,", "was not persisted. In those cases put # the culprit", "def get_email_subject(self): template = self.project.get_option(\"mail:subject_template\") if template: template = EventSubjectTemplate(template)", "t, v in self.get_tags(): if t == key: return v", "nodestore blobs. \"\"\" if \"tags.key\" in self.snuba_data and \"tags.value\" in", "from __future__ import absolute_import import six import string import warnings", "else: return super(SnubaEvent, self).tags def get_minimal_user(self): from sentry.interfaces.user import User", "**kwargs): is_renormalized = isinstance(data, EventDict) or ( isinstance(data, NodeData) and", "way as a regular Event. \"\"\" # The minimal list", "# fall back to generating new ones from the data.", "function the event data has been modified in place. \"\"\"", "the data available to return, or we may have to", "body. 
This might be useful for implementing tag deletions without", "or ( isinstance(data, NodeData) and isinstance(data.data, EventDict) ) if not", "@property def dist(self): return self.get_tag(\"sentry:dist\") def get_raw_data(self): \"\"\"Returns the internal", "this if we do not force a different config. if", "k, v in sorted(six.iteritems(self.data)): if k in data: continue if", "raw event data dict.\"\"\" return dict(self.data.items()) @property def size(self): return", "that creating an event object (or loading it from the", "if hashes is not None: return hashes return filter( None,", "this for snuba et = eventtypes.get(self.get_event_type())() return et.get_title(self.get_event_metadata()) @property def", "self.snuba_data[\"type\"] return super(SnubaEvent, self).get_event_type() @property def ip_address(self): if \"ip_address\" in", "# ==================================================== @property def datetime(self): \"\"\" Reconstruct the datetime of", "this on demand # again. In particular if we were", "not None: if isinstance(force_config, six.string_types): stored_config = self.get_grouping_config() config =", "def get_environment(self): from sentry.models import Environment if not hasattr(self, \"_environment_cache\"):", "wrapper type for `Event.data` such that creating an event object", "project): if project is None: self.project_id = None else: self.project_id", "try: rv = sorted( [ (t, v) for t, v", "plugin code and once the message rename is through #", "super(SnubaEvent, self).location # ==================================================== # Snuba implementations of the django", "or self.group.level if self.group: return self.group.level else: return None def", "from the data. We can only use # this if", "return et.get_title(self.get_event_metadata()) @property def culprit(self): # For a while events", "postgres. \"\"\" __core__ = False group_id = BoundedBigIntegerField(blank=True, null=True) event_id", "project_id and event_id. 
\"\"\" return md5(\"{}:{}\".format(project_id, event_id)).hexdigest() # TODO (alex)", "a deterministic node_id for this event based on the project_id", "self.project_id = project.id self._project_cache = project def get_interfaces(self): return CanonicalKeyView(get_interfaces(self.data))", "deprecated. Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"logger\") @property def site(self):", "a few properties, and they are all available in snuba,", "= self.time_spent data[\"tags\"] = [(k.split(\"sentry:\", 1)[-1], v) for (k, v)", "if value is None: raise KeyError return six.text_type(value) elif name", "location(self): # also see event_manager.py which inserts this for snuba", "is not None: if isinstance(force_config, six.string_types): stored_config = self.get_grouping_config() config", "with the stored or # default config dictionary if force_config", "return self.get_interface(\"user\") def as_dict(self): \"\"\"Returns the data in normalized form", "culprit in from the group. if data.get(\"culprit\") is None and", "the metadata of this event. See ``sentry.eventtypes``. \"\"\" # For", "\"email\", \"ip_address\", \"user_id\", \"username\", ] __repr__ = sane_repr(\"project_id\", \"group_id\") def", "Event and SnubaEvent. \"\"\" @classmethod def generate_node_id(cls, project_id, event_id): \"\"\"", "def server_name(self): warnings.warn( \"Event.server_name is deprecated. Use Event.tags instead.\", DeprecationWarning", "UTC even though the string # ends with '+00:00', so", "eventtypes.get(self.get_event_type())() return et.get_title(self.get_event_metadata()) @property def culprit(self): # For a while", "creation or save. The basic event data is fetched from", "Group if not self.group_id: return None if not hasattr(self, \"_group_cache\"):", "if \"tags.key\" in self.snuba_data and \"tags.value\" in self.snuba_data: keys =", "have to look in the `data` dict (which would force", "be # removed. 
`message` will be renamed to `search_message` and", "a while events did not save the culprit if self.group_id:", "( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\") or self.message", "deterministic node_id for this event based on the project_id and", "@property def culprit(self): if \"culprit\" in self.snuba_data: return self.snuba_data[\"culprit\"] return", "\"formatted\") or get_path(self.data, \"logentry\", \"message\") or self.message ) def get_event_type(self):", "value = self.event.get_tag(self.tag_aliases.get(name, name)) if value is None: raise KeyError", "== \"sdk\": v = {v_k: v_v for v_k, v_v in", "= self.get_grouping_config() config = dict(stored_config) config[\"id\"] = force_config else: config", "Grouping hashes will take into account fingerprinting and checksums. \"\"\"", "the culprit in from the group. if data.get(\"culprit\") is None", "\"\"\"Returns the event grouping config.\"\"\" from sentry.grouping.api import get_grouping_config_dict_for_event_data return", "event through basic (Rust-based) type/schema validation called \"re-normalization\". This is", "that case we want to hobble along # further. return", "et = eventtypes.get(self.get_event_type())() return et.get_title(self.get_event_metadata()) @property def culprit(self): # For", "return super(SnubaEvent, self).location # ==================================================== # Snuba implementations of the", "get_grouping_variants_for_event(self, config) def get_primary_hash(self): # TODO: This *might* need to", "need a better way to cache these properties. 
functools32 #", "When initializing a SnubaEvent, think about the attributes you might", "\"type\" in self.snuba_data: return self.snuba_data[\"type\"] return super(SnubaEvent, self).get_event_type() @property def", "selected_columns = minimal_columns + [ \"culprit\", \"location\", \"message\", \"platform\", \"title\",", "deprecated accessors @property def logger(self): warnings.warn(\"Event.logger is deprecated. Use Event.tags", "v) in self.tags] for k, v in sorted(six.iteritems(self.data)): if k", "and self.group_id: data[\"culprit\"] = self.group.culprit # Override title and location", "len(values): return sorted(zip(keys, values)) else: return [] else: return super(SnubaEvent,", "if we were to pickle interfaces we would # pickle", "does not support event creation or save. The basic event", "of this event from the snuba timestamp \"\"\" # dateutil", "((\"group_id\", \"datetime\"),) __repr__ = sane_repr(\"project_id\", \"group_id\") def __getstate__(self): state =", "============================================ @property def tags(self): \"\"\" Override of tags property that", "None) state.pop(\"_environment_cache\", None) state.pop(\"_group_cache\", None) state.pop(\"interfaces\", None) return state class", "CanonicalKeyDict.__init__(self, data, **kwargs) class EventCommon(object): \"\"\" Methods and properties common", "_(\"messages\") unique_together = ((\"project_id\", \"event_id\"),) index_together = ((\"group_id\", \"datetime\"),) __repr__", "your needs) But if you know you are going to", "time culprit was not persisted. 
In those cases put #", "send the event through basic (Rust-based) type/schema validation called \"re-normalization\".", "sets such as (foo, bar) # vs ((tag, foo), (tag,", "old sentry workers do not know # about state.pop(\"_project_cache\", None)", "= self.data.get(\"hashes\") if hashes is not None: return hashes return", "= {\"release\": \"sentry:release\", \"dist\": \"sentry:dist\", \"user\": \"sentry:user\"} def __init__(self, event):", "the way in and we can just return # it,", "data we use them, otherwise we # fall back to", "snuba to # avoid duplicated work. minimal_columns = [\"event_id\", \"group_id\",", "of all useful columns we can get from snuba. selected_columns", "import Environment if not hasattr(self, \"_environment_cache\"): self._environment_cache = Environment.objects.get( organization_id=self.project.organization_id,", "than the nodestore event body. This might be useful for", "null=True) event_id = models.CharField(max_length=32, null=True, db_column=\"message_id\") project_id = BoundedBigIntegerField(blank=True, null=True)", "return self.event_id def save(self): raise NotImplementedError class Event(EventCommon, Model): \"\"\"", "\"formatted\") or get_path(self.data, \"logentry\", \"message\") or \"\" ) @property def", "error(self): # TODO why is this not a property? 
warnings.warn(\"Event.error", "current _group_cache thing is also clunky because these # properties", "return self.data.get(\"metadata\") or {} def get_grouping_config(self): \"\"\"Returns the event grouping", "(which requires a nodestore lookup) you may as well just", "name in self.snuba_data: return self.snuba_data[name] else: return self.data[name] # ============================================", "this is a transitional attribute that should be # removed.", "() if t is not None and v is not", "if k in data: continue if k == \"sdk\": v", "__repr__ = sane_repr(\"project_id\", \"group_id\") def __getstate__(self): state = Model.__getstate__(self) #", "\"\"\" # For some inexplicable reason we have some cases", "need a few properties, and they are all available in", "we can get from snuba. selected_columns = minimal_columns + [", "values)) else: return [] else: return super(SnubaEvent, self).tags def get_minimal_user(self):", "# Override title and location with dynamically generated data data[\"title\"]", "long time culprit was not persisted. In those cases put", "(k, v) in self.tags] for k, v in sorted(six.iteritems(self.data)): if", "def title(self): # also see event_manager.py which inserts this for", "for external consumers.\"\"\" # We use a OrderedDict to keep", "each variant in a dictionary. If `normalize_stacktraces` is set to", "do the trick as there is a reference bug with", "to provide snuba-only 'user' interface \"email\", \"ip_address\", \"user_id\", \"username\", ]", "of tags property that uses tags from snuba rather than", "in # the default. else: config = self.data.get(\"grouping_config\") config =", "None) state.pop(\"interfaces\", None) return state class EventSubjectTemplate(string.Template): idpattern = r\"(tag:)?[_a-z][_a-z0-9]*\"", "or self.message ) def get_event_type(self): \"\"\" Return the type of", "put # the culprit in from the group. 
if data.get(\"culprit\")", "enable_trimming=False) data = normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self, data, **kwargs) class EventCommon(object): \"\"\"", "id, just return # the hex event_id here. We should", "def ip_address(self): ip_address = get_path(self.data, \"user\", \"ip_address\") if ip_address: return", "(\"_project_cache\", \"_group_cache\", \"_environment_cache\"): raise AttributeError() if name in self.snuba_data: return", "from django.db import models from django.utils import timezone from django.utils.translation", "from sentry.grouping.api import get_grouping_config_dict_for_event_data return get_grouping_config_dict_for_event_data(self.data, self.project) def get_hashes(self, force_config=None):", "OrderedDict() data[\"event_id\"] = self.event_id data[\"project\"] = self.project_id data[\"release\"] = self.release", "real_message. return ( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\")", "# being used by plugin code and once the message", "only fetch the minimum from snuba to # avoid duplicated", "did not save the culprit if self.group_id: return self.data.get(\"culprit\") or", "message = models.TextField() platform = models.CharField(max_length=64, null=True) datetime = models.DateTimeField(default=timezone.now,", "self.group_id: return self.data.get(\"culprit\") or self.group.culprit return self.data.get(\"culprit\") @property def location(self):", "len(keys) == len(values): return sorted(zip(keys, values)) else: return [] else:", "dateutil seems to use tzlocal() instead of UTC even though", "from snuba, we assume # it was already normalized on", "of all the stuff we got from snuba self.snuba_data =", "None: if isinstance(force_config, six.string_types): stored_config = self.get_grouping_config() config = dict(stored_config)", "the data for these is available from snuba, we assume", "\"\"\"Returns the data in normalized form for external consumers.\"\"\" #", "once it's 
unused. It's still # being used by plugin", "implementations of the django fields on Event # ==================================================== @property", "a nodestore load). All unresolved self.foo type accesses will come", "get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\") or self.message )", "a django row id, just return # the hex event_id", "event backed by data stored in postgres. \"\"\" __core__ =", "minimal 'User' interface object that gives us enough information to", "a transitional attribute that should be # removed. `message` will", "# again. In particular if we were to pickle interfaces", "def project(self, project): if project is None: self.project_id = None", "from sentry.interfaces.base import get_interfaces from sentry.utils import json from sentry.utils.cache", "# do not pickle cached info. We want to fetch", "group @property def project(self): from sentry.models import Project if not", "config ID is given in which case it's merged with", "data[\"project\"] = self.project_id data[\"release\"] = self.release data[\"dist\"] = self.dist data[\"platform\"]", "need to be stripped out in __getstate__. @property def group(self):", "accessors @property def logger(self): warnings.warn(\"Event.logger is deprecated. Use Event.tags instead.\",", "hashes return filter( None, [x.get_hash() for x in self.get_grouping_variants(force_config).values()] )", "self.interfaces.get(name) def get_legacy_message(self): # TODO(mitsuhiko): remove this code once it's", "seems to use tzlocal() instead of UTC even though the", "and bound to the data property in the same way", "return self._group_cache @group.setter def group(self, group): self.group_id = group.id self._group_cache", "badge. \"\"\" return self.get_interface(\"user\") def as_dict(self): \"\"\"Returns the data in", "if self.group: return self.group.level else: return None def get_level_display(self): #", "come through here. 
\"\"\" if name in (\"_project_cache\", \"_group_cache\", \"_environment_cache\"):", "This means that after calling that function the event data", "called \"re-normalization\". This is used as a wrapper type for", "\"project\": return self.event.project.get_full_name() elif name == \"projectID\": return self.event.project.slug elif", "= \"sentry\" db_table = \"sentry_message\" verbose_name = _(\"message\") verbose_name_plural =", "The event body should be saved under this key in", "not None: return hashes return filter( None, [x.get_hash() for x", "or x.project.id, ref_version=2, wrapper=EventDict, ) objects = EventManager() class Meta:", "grouping components for each variant in a dictionary. If `normalize_stacktraces`", "DeprecationWarning) return self.get_tag(\"logger\") @property def site(self): warnings.warn(\"Event.site is deprecated. Use", "__getstate__. @property def group(self): from sentry.models import Group if not", "in self.snuba_data: return self.snuba_data[name] else: return self.data[name] # ============================================ #", "# ============================================ # DEPRECATED # ============================================ @property def level(self): #", "potential JSON serializer data = OrderedDict() data[\"event_id\"] = self.event_id data[\"project\"]", "if \"ip_address\" in self.snuba_data: return self.snuba_data[\"ip_address\"] return super(SnubaEvent, self).ip_address @property", "just use the same grouping config as stored. if #", "\"Event.server_name is deprecated. Use Event.tags instead.\", DeprecationWarning ) return self.get_tag(\"server_name\")", "culprit(self): if \"culprit\" in self.snuba_data: return self.snuba_data[\"culprit\"] return super(SnubaEvent, self).culprit", "+ [ \"culprit\", \"location\", \"message\", \"platform\", \"title\", \"type\", # Required", "We should be moving to a world where we never", "variant in a dictionary. 
If `normalize_stacktraces` is set to `True`", "be renamed to `search_message` and this # will become `message`.", "objects = EventManager() class Meta: app_label = \"sentry\" db_table =", "import six import string import warnings import pytz from collections", "such as (foo, bar) # vs ((tag, foo), (tag, bar))", "return self.group.get_level_display() else: return None # deprecated accessors @property def", "platform(self): if \"platform\" in self.snuba_data: return self.snuba_data[\"platform\"] return self.data.get(\"platform\") @property", "data, skip_renormalization=False, **kwargs): is_renormalized = isinstance(data, EventDict) or ( isinstance(data,", "\"tags\", filter=True) or () if t is not None and", "Model): \"\"\" An event backed by data stored in postgres.", "UTC because we know # all timestamps from snuba are", "self.group.get_level_display() else: return None # deprecated accessors @property def logger(self):", "return super(SnubaEvent, self).ip_address @property def title(self): if \"title\" in self.snuba_data:", "same grouping config as stored. if # this is None", "@property def release(self): return self.get_tag(\"sentry:release\") @property def dist(self): return self.get_tag(\"sentry:dist\")", "tags(self): try: rv = sorted( [ (t, v) for t,", "at one point Sentry allowed invalid tag sets such as", "return None @property def message(self): if \"message\" in self.snuba_data: return", "from snuba to # avoid duplicated work. 
minimal_columns = [\"event_id\",", "just replace the TZ with UTC because we know #", "# The minimal list of columns we need to get", "have hashes stored in the data we use them, otherwise", "\"_environment_cache\"): raise AttributeError() if name in self.snuba_data: return self.snuba_data[name] else:", "time_spent(self): return None @property def message(self): if \"message\" in self.snuba_data:", "to this: # return self.get_tag('level') or self.group.get_level_display() if self.group: return", "but will instead return the grouping components for each variant", "the row id anyway. return self.event_id def save(self): raise NotImplementedError", "@property def size(self): return len(json.dumps(dict(self.data))) @property def transaction(self): return self.get_tag(\"transaction\")", "on demand # again. In particular if we were to", "event from the snuba timestamp \"\"\" # dateutil seems to", "from snuba rather than the nodestore event body. This might", "import json from sentry.utils.cache import memoize from sentry.utils.canonical import CanonicalKeyDict,", "will ensure the data fits the type schema. \"\"\" def", "the # config ID is given in which case it's", "warnings.warn(\"Event.logger is deprecated. 
Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"logger\") @property", "you only need a few properties, and they are all", "= self.data.get(\"grouping_config\") config = load_grouping_config(config) if normalize_stacktraces: normalize_stacktraces_for_grouping(self.data, config) return", "self.event = event def __getitem__(self, name): if name.startswith(\"tag:\"): name =", "group.id self._group_cache = group @property def project(self): from sentry.models import", "of the django fields on Event # ==================================================== @property def", "== \"projectID\": return self.event.project.slug elif name == \"shortID\" and self.event.group_id:", "string import warnings import pytz from collections import OrderedDict from", "group(self): from sentry.models import Group if not self.group_id: return None", "def get_grouping_config(self): \"\"\"Returns the event grouping config.\"\"\" from sentry.grouping.api import", "self.data.get(\"culprit\") or self.group.culprit return self.data.get(\"culprit\") @property def location(self): # also", "only have project_id and event_id. \"\"\" return md5(\"{}:{}\".format(project_id, event_id)).hexdigest() #", "self.data.get(\"culprit\") @property def location(self): # also see event_manager.py which inserts", "\"\"\" return md5(\"{}:{}\".format(project_id, event_id)).hexdigest() # TODO (alex) We need a", "grouping config as stored. if # this is None the", "get_path(self.data, \"request\", \"env\", \"REMOTE_ADDR\") if remote_addr: return remote_addr return None", "DEPRECATED # ============================================ @property def level(self): # we might want", "if t == key: return v return None @property def", "self.project.organization @property def version(self): return self.data.get(\"version\", \"5\") @property def ip_address(self):", "and we can just return # it, otherwise we defer", "# nodestore anyway, we may as well only fetch the", "not force a different config. 
if force_config is None: hashes", "__init__(self, snuba_values): \"\"\" When initializing a SnubaEvent, think about the", "a snuba event will never have a django row id,", "so just replace the TZ with UTC because we know", "self.group_id: return None if not hasattr(self, \"_group_cache\"): self._group_cache = Group.objects.get(id=self.group_id)", "minimum from snuba to # avoid duplicated work. minimal_columns =", "not persisted. In those cases put # the culprit in", "fingerprinting and checksums. \"\"\" # If we have hashes stored", "# For a while events did not save the culprit", "\"\"\" An event backed by data stored in snuba. This", "None ] ) return rv except ValueError: # at one", "v_k != \"client_ip\"} data[k] = v # for a long", "self.project) def get_hashes(self, force_config=None): \"\"\" Returns the calculated hashes for", "\"group_id\", \"project_id\", \"timestamp\"] # A list of all useful columns", "subset depending on your needs) But if you know you", "platform = models.CharField(max_length=64, null=True) datetime = models.DateTimeField(default=timezone.now, db_index=True) time_spent =", "sentry.grouping.api import get_grouping_config_dict_for_event_data return get_grouping_config_dict_for_event_data(self.data, self.project) def get_hashes(self, force_config=None): \"\"\"", "server_name(self): warnings.warn( \"Event.server_name is deprecated. Use Event.tags instead.\", DeprecationWarning )", "return # the hex event_id here. We should be moving", "``sentry.eventtypes``. 
\"\"\" # For some inexplicable reason we have some", "= NodeData(None, node_id, data=None, wrapper=EventDict) def __getattr__(self, name): \"\"\" Depending", "state.pop(\"_group_cache\", None) state.pop(\"interfaces\", None) return state class EventSubjectTemplate(string.Template): idpattern =", "project_id, event_id): \"\"\" Returns a deterministic node_id for this event", "this event from the snuba timestamp \"\"\" # dateutil seems", "body from # nodestore anyway, we may as well only", "if k == \"sdk\": v = {v_k: v_v for v_k,", "[\"event_id\", \"group_id\", \"project_id\", \"timestamp\"] # A list of all useful", "the type of this event. See ``sentry.eventtypes``. \"\"\" return self.data.get(\"type\",", "list of all useful columns we can get from snuba.", "from sentry import eventtypes from sentry.db.models import ( BoundedBigIntegerField, BoundedIntegerField,", "already normalized on the way in and we can just", "organization(self): return self.project.organization @property def version(self): return self.data.get(\"version\", \"5\") @property", "_(\"error\") @property def message_short(self): warnings.warn(\"Event.message_short is deprecated, use Event.title\", DeprecationWarning)", "cached info. We want to fetch this on demand #", "get_grouping_variants_for_event, load_grouping_config from sentry.stacktraces.processing import normalize_stacktraces_for_grouping # Forcing configs has", "a wrapper type for `Event.data` such that creating an event", "return self.data.get(\"message\") @property def platform(self): if \"platform\" in self.snuba_data: return", "See ``sentry.eventtypes``. 
\"\"\" return self.data.get(\"type\", \"default\") def get_event_metadata(self): \"\"\" Return", "\"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\") or self.message ) def", "project(self, project): if project is None: self.project_id = None else:", "self._group_cache @group.setter def group(self, group): self.group_id = group.id self._group_cache =", "in SnubaEvent.minimal_columns) # self.snuba_data is a dict of all the", "dict of all the stuff we got from snuba self.snuba_data", "to be stripped out in __getstate__. @property def group(self): from", "# TODO(mitsuhiko): remove this code once it's unused. It's still", "there is a reference bug with unsaved # models. But", "This uses the stored information if available. Grouping hashes will", "we can just return # it, otherwise we defer to", "\"\" ) @property def organization(self): return self.project.organization @property def version(self):", "will never have a django row id, just return #", ") # If the data for these is available from", "snuba_values # self.data is a (lazy) dict of everything we", "snuba et = eventtypes.get(self.get_event_type())() return et.get_title(self.get_event_metadata()) @property def culprit(self): #", "dist(self): return self.get_tag(\"sentry:dist\") def get_raw_data(self): \"\"\"Returns the internal raw event", "============================================ @property def level(self): # we might want to move", "is this not a property? warnings.warn(\"Event.error is deprecated, use Event.title\",", "BoundedBigIntegerField(blank=True, null=True) event_id = models.CharField(max_length=32, null=True, db_column=\"message_id\") project_id = BoundedBigIntegerField(blank=True,", "from snuba to bootstrap an # event. 
If the client", "Because a snuba event will never have a django row", "# default config dictionary if force_config is not None: if", "self.snuba_data: return self.snuba_data[\"title\"] return super(SnubaEvent, self).title @property def culprit(self): if", "for snuba et = eventtypes.get(self.get_event_type())() return et.get_location(self.get_event_metadata()) @property def real_message(self):", "self.snuba_data: return self.snuba_data[\"platform\"] return self.data.get(\"platform\") @property def id(self): # Because", "title(self): if \"title\" in self.snuba_data: return self.snuba_data[\"title\"] return super(SnubaEvent, self).title", "def tags(self): try: rv = sorted( [ (t, v) for", "on the project_id and event_id which together are globally unique.", "property? warnings.warn(\"Event.error is deprecated, use Event.title\", DeprecationWarning) return self.title error.short_description", "the entire event body from # nodestore anyway, we may", "data property in the same way as a regular Event.", "`True` then the event data will be modified for `in_app`", "template = EventSubjectTemplate(template) else: template = DEFAULT_SUBJECT_TEMPLATE return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode(", "as there is a reference bug with unsaved # models.", "if name in self.snuba_data: return self.snuba_data[name] else: return self.data[name] #", "row id anyway. return self.event_id def save(self): raise NotImplementedError class", "Reconstruct the datetime of this event from the snuba timestamp", "configs has two separate modes. One is where just the", "return get_grouping_config_dict_for_event_data(self.data, self.project) def get_hashes(self, force_config=None): \"\"\" Returns the calculated", "if \"message\" in self.snuba_data: return self.snuba_data[\"message\"] return self.data.get(\"message\") @property def", "has two separate modes. 
One is where just the #", "tags \"tags.key\", \"tags.value\", # Required to provide snuba-only 'user' interface", "get_interface(self, name): return self.interfaces.get(name) def get_legacy_message(self): # TODO(mitsuhiko): remove this", "the event data will be modified for `in_app` in addition", "return self.get_tag(\"logger\") @property def site(self): warnings.warn(\"Event.site is deprecated. Use Event.tags", "saved under this key in nodestore so it can be", "self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"] ) self.data = NodeData(None, node_id, data=None, wrapper=EventDict) def", "in place. \"\"\" from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config from sentry.stacktraces.processing", "Environment.objects.get( organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")), ) return self._environment_cache def get_minimal_user(self): \"\"\" A", "to both Event and SnubaEvent. \"\"\" @classmethod def generate_node_id(cls, project_id,", "@group.setter def group(self, group): self.group_id = group.id self._group_cache = group", "# about state.pop(\"_project_cache\", None) state.pop(\"_environment_cache\", None) state.pop(\"_group_cache\", None) state.pop(\"interfaces\", None)", "class EventSubjectTemplate(string.Template): idpattern = r\"(tag:)?[_a-z][_a-z0-9]*\" class EventSubjectTemplateData(object): tag_aliases = {\"release\":", "\"\"\" Returns a deterministic node_id for this event based on", "project is None: self.project_id = None else: self.project_id = project.id", "can just return # it, otherwise we defer to EventCommon", "def get_minimal_user(self): \"\"\" A minimal 'User' interface object that gives", "def level(self): # we might want to move to this:", "force_config=None): \"\"\" Returns the calculated hashes for the event. This", "an IndexError? 
return self.get_hashes()[0] @property def title(self): # also see", "isinstance(data, NodeData) and isinstance(data.data, EventDict) ) if not skip_renormalization and", "and properties common to both Event and SnubaEvent. \"\"\" @classmethod", "actual message attribute or # this method could return what", "is similar to `get_hashes` but will instead return the grouping", "persisted. In those cases put # the culprit in from", "def ip_address(self): if \"ip_address\" in self.snuba_data: return self.snuba_data[\"ip_address\"] return super(SnubaEvent,", "self.snuba_data: return self.snuba_data[\"location\"] return super(SnubaEvent, self).location # ==================================================== # Snuba", "et = eventtypes.get(self.get_event_type())() return et.get_location(self.get_event_metadata()) @property def real_message(self): # XXX(mitsuhiko):", "not is_renormalized: normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False) data = normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self,", "are all available in snuba, then you should use `SnubaEvent.selected_colums`", "md5 from semaphore.processing import StoreNormalizer from sentry import eventtypes from", "version(self): return self.data.get(\"version\", \"5\") @property def ip_address(self): ip_address = get_path(self.data,", "config) return get_grouping_variants_for_event(self, config) def get_primary_hash(self): # TODO: This *might*", "being used by plugin code and once the message rename", "when we only have project_id and event_id. \"\"\" return md5(\"{}:{}\".format(project_id,", "the attributes come from nodestore. \"\"\" assert all(k in snuba_values", "rv = sorted( [ (t, v) for t, v in", "remote_addr: return remote_addr return None @property def tags(self): try: rv", "know # all timestamps from snuba are UTC. 
return parse_date(self.timestamp).replace(tzinfo=pytz.utc)", "\"_environment_cache\"): self._environment_cache = Environment.objects.get( organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")), ) return self._environment_cache def", "from sentry.stacktraces.processing import normalize_stacktraces_for_grouping # Forcing configs has two separate", "= _(\"message\") verbose_name_plural = _(\"messages\") unique_together = ((\"project_id\", \"event_id\"),) index_together", "return, or we may have to look in the `data`", "`SnubaEvent.minimal_colums` and let the rest of of the attributes come", "get_path(self.data, \"logentry\", \"message\") or self.message ) def get_event_type(self): \"\"\" Return", "interface \"email\", \"ip_address\", \"user_id\", \"username\", ] __repr__ = sane_repr(\"project_id\", \"group_id\")", "components for each variant in a dictionary. If `normalize_stacktraces` is", "self._group_cache = Group.objects.get(id=self.group_id) return self._group_cache @group.setter def group(self, group): self.group_id", "in (\"_project_cache\", \"_group_cache\", \"_environment_cache\"): raise AttributeError() if name in self.snuba_data:", "such that creating an event object (or loading it from", "# have to reference the row id anyway. return self.event_id", "demand # again. In particular if we were to pickle", "requires a nodestore lookup) you may as well just initialize", "the rest of of the attributes come from nodestore. 
\"\"\"", "Event.tags instead.\", DeprecationWarning ) return self.get_tag(\"server_name\") @property def checksum(self): warnings.warn(\"Event.checksum", "self).culprit @property def location(self): if \"location\" in self.snuba_data: return self.snuba_data[\"location\"]", "NodeData, NodeField, sane_repr, ) from sentry.db.models.manager import EventManager from sentry.interfaces.base", "never have a django row id, just return # the", "can only use # this if we do not force", "and they are all available in snuba, then you should", "for k in SnubaEvent.minimal_columns) # self.snuba_data is a dict of", "addition to event variants being created. This means that after", "self.project_id data[\"release\"] = self.release data[\"dist\"] = self.dist data[\"platform\"] = self.platform", "grouping config.\"\"\" from sentry.grouping.api import get_grouping_config_dict_for_event_data return get_grouping_config_dict_for_event_data(self.data, self.project) def", "data has been modified in place. \"\"\" from sentry.grouping.api import", "self.snuba_data: return self.snuba_data[\"type\"] return super(SnubaEvent, self).get_event_type() @property def ip_address(self): if", "allowed invalid tag sets such as (foo, bar) # vs", "if project is None: self.project_id = None else: self.project_id =", "the nodestore event body. This might be useful for implementing", "def datetime(self): \"\"\" Reconstruct the datetime of this event from", "= self.dist data[\"platform\"] = self.platform data[\"message\"] = self.real_message data[\"datetime\"] =", "instead of UTC even though the string # ends with", "None @property def message(self): if \"message\" in self.snuba_data: return self.snuba_data[\"message\"]", "snuba-only 'user' interface \"email\", \"ip_address\", \"user_id\", \"username\", ] __repr__ =", "= group @property def project(self): from sentry.models import Project if", "None the `get_grouping_variants_for_event` will fill in # the default. 
else:", "case it's merged with the stored or # default config", "the same generated id when we only have project_id and", "properties. functools32 # doesn't quite do the trick as there", "} ) # If the data for these is available", "sentry.models import Environment if not hasattr(self, \"_environment_cache\"): self._environment_cache = Environment.objects.get(", "{} def get_grouping_config(self): \"\"\"Returns the event grouping config.\"\"\" from sentry.grouping.api", "should be saved under this key in nodestore so it", "get_interfaces(self): return CanonicalKeyView(get_interfaces(self.data)) @memoize def interfaces(self): return self.get_interfaces() def get_interface(self,", "@property def ip_address(self): ip_address = get_path(self.data, \"user\", \"ip_address\") if ip_address:", "fetched from snuba, and the event body is fetched from", "models.DateTimeField(default=timezone.now, db_index=True) time_spent = BoundedIntegerField(null=True) data = NodeField( blank=True, null=True,", "# models. But the current _group_cache thing is also clunky", "hasattr(self, \"_environment_cache\"): self._environment_cache = Environment.objects.get( organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")), ) return self._environment_cache", "\"default\") def get_event_metadata(self): \"\"\" Return the metadata of this event.", "self.group_id = group.id self._group_cache = group @property def project(self): from", "node_id for this event based on the project_id and event_id", "cases put # the culprit in from the group. if", "= Model.__getstate__(self) # do not pickle cached info. 
We want", "to move to this: # return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level if", "config = dict(stored_config) config[\"id\"] = force_config else: config = force_config", "is None: self.project_id = None else: self.project_id = project.id self._project_cache", "body is fetched from nodestore and bound to the data", "@property def time_spent(self): return None @property def message(self): if \"message\"", "self.data.get(\"message\") @property def platform(self): if \"platform\" in self.snuba_data: return self.snuba_data[\"platform\"]", "def get_raw_data(self): \"\"\"Returns the internal raw event data dict.\"\"\" return", "et.get_title(self.get_event_metadata()) @property def culprit(self): # For a while events did", "do not pickle cached info. We want to fetch this", "validation called \"re-normalization\". This is used as a wrapper type", "culprit if self.group_id: return self.data.get(\"culprit\") or self.group.culprit return self.data.get(\"culprit\") @property", "sentry.stacktraces.processing import normalize_stacktraces_for_grouping # Forcing configs has two separate modes.", "name == \"title\": return self.event.title raise KeyError DEFAULT_SUBJECT_TEMPLATE = EventSubjectTemplate(\"$shortID", "BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr, ) from sentry.db.models.manager import", "key): for t, v in self.get_tags(): if t == key:", "XXX(mitsuhiko): this is a transitional attribute that should be #", "also see event_manager.py which inserts this for snuba et =", "is deprecated. 
Use Event.tags instead.\", DeprecationWarning ) return self.get_tag(\"server_name\") @property", "in self.snuba_data and \"tags.value\" in self.snuba_data: keys = getattr(self, \"tags.key\")", "= eventtypes.get(self.get_event_type())() return et.get_title(self.get_event_metadata()) @property def culprit(self): # For a", "= self.datetime data[\"time_spent\"] = self.time_spent data[\"tags\"] = [(k.split(\"sentry:\", 1)[-1], v)", "============================================ # Snuba-only implementations of properties that # would otherwise", "a subset depending on your needs) But if you know", "return self._project_cache @project.setter def project(self, project): if project is None:", "import absolute_import import six import string import warnings import pytz", "it can be retrieved using the same generated id when", "force a different config. if force_config is None: hashes =", "on your needs) But if you know you are going", "def culprit(self): # For a while events did not save", "we got from snuba self.snuba_data = snuba_values # self.data is", "((tag, foo), (tag, bar)) return [] # For compatibility, still", "assume # it was already normalized on the way in", "if \"title\" in self.snuba_data: return self.snuba_data[\"title\"] return super(SnubaEvent, self).title @property", "snuba-only tags \"tags.key\", \"tags.value\", # Required to provide snuba-only 'user'", "__repr__ = sane_repr(\"project_id\", \"group_id\") def __init__(self, snuba_values): \"\"\" When initializing", "have project_id and event_id. 
\"\"\" return md5(\"{}:{}\".format(project_id, event_id)).hexdigest() # TODO", "useful for implementing tag deletions without having to rewrite nodestore", "return super(SnubaEvent, self).get_event_type() @property def ip_address(self): if \"ip_address\" in self.snuba_data:", "self.snuba_data: return self.snuba_data[\"culprit\"] return super(SnubaEvent, self).culprit @property def location(self): if", "the actual message attribute or # this method could return", "event will never have a django row id, just return", "\"tags.value\") if keys and values and len(keys) == len(values): return", "type accesses will come through here. \"\"\" if name in", "= _(\"error\") @property def message_short(self): warnings.warn(\"Event.message_short is deprecated, use Event.title\",", "return None # deprecated accessors @property def logger(self): warnings.warn(\"Event.logger is", "We can only use # this if we do not", "\"\"\" __core__ = False group_id = BoundedBigIntegerField(blank=True, null=True) event_id =", "event body anyway (which requires a nodestore lookup) you may", "= {v_k: v_v for v_k, v_v in six.iteritems(v) if v_k", "None) return state class EventSubjectTemplate(string.Template): idpattern = r\"(tag:)?[_a-z][_a-z0-9]*\" class EventSubjectTemplateData(object):", "state class EventSubjectTemplate(string.Template): idpattern = r\"(tag:)?[_a-z][_a-z0-9]*\" class EventSubjectTemplateData(object): tag_aliases =", "snuba rather than the nodestore event body. This might be", "return self.data.get(\"platform\") @property def id(self): # Because a snuba event", "force_config is None: hashes = self.data.get(\"hashes\") if hashes is not", "This *might* need to be protected from an IndexError? return", "= project.id self._project_cache = project def get_interfaces(self): return CanonicalKeyView(get_interfaces(self.data)) @memoize", "pytz from collections import OrderedDict from dateutil.parser import parse as", "anyway. 
return self.event_id def save(self): raise NotImplementedError class Event(EventCommon, Model):", "def get_grouping_variants(self, force_config=None, normalize_stacktraces=False): \"\"\" This is similar to `get_hashes`", "backed by data stored in postgres. \"\"\" __core__ = False", "message(self): if \"message\" in self.snuba_data: return self.snuba_data[\"message\"] return self.data.get(\"message\") @property", "together are globally unique. The event body should be saved", "event): self.event = event def __getitem__(self, name): if name.startswith(\"tag:\"): name", "normalized form for external consumers.\"\"\" # We use a OrderedDict", "= get_path(self.data, \"request\", \"env\", \"REMOTE_ADDR\") if remote_addr: return remote_addr return", "SnubaEvent(EventCommon): \"\"\" An event backed by data stored in snuba.", "to move to this: # return self.get_tag('level') or self.group.get_level_display() if", "def tags(self): \"\"\" Override of tags property that uses tags", "if not self.group_id: return None if not hasattr(self, \"_group_cache\"): self._group_cache", "= self.release data[\"dist\"] = self.dist data[\"platform\"] = self.platform data[\"message\"] =", "self.snuba_data is a dict of all the stuff we got", "are globally unique. 
The event body should be saved under", "# config ID is given in which case it's merged", "with UTC because we know # all timestamps from snuba", "But the current _group_cache thing is also clunky because these", "import ( BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr, ) from", "NodeField, sane_repr, ) from sentry.db.models.manager import EventManager from sentry.interfaces.base import", "force_config is not None: if isinstance(force_config, six.string_types): stored_config = self.get_grouping_config()", "in six.iteritems(v) if v_k != \"client_ip\"} data[k] = v #", "get_minimal_user(self): \"\"\" A minimal 'User' interface object that gives us", "v_v in six.iteritems(v) if v_k != \"client_ip\"} data[k] = v", "based on the project_id and event_id which together are globally", "will be renamed to `search_message` and this # will become", "functools32 # doesn't quite do the trick as there is", "@property def transaction(self): return self.get_tag(\"transaction\") def get_email_subject(self): template = self.project.get_option(\"mail:subject_template\")", "return self.get_tag(\"server_name\") @property def checksum(self): warnings.warn(\"Event.checksum is no longer used\",", "is not None and v is not None ] )", "k in SnubaEvent.minimal_columns) # self.snuba_data is a dict of all", "event data dict.\"\"\" return dict(self.data.items()) @property def size(self): return len(json.dumps(dict(self.data)))", "a readonly event and does not support event creation or", "basic event data is fetched from snuba, and the event", "row id, just return # the hex event_id here. We", "to hobble along # further. 
return self.data.get(\"metadata\") or {} def", "event_manager.py which inserts this for snuba et = eventtypes.get(self.get_event_type())() return", "in sorted(six.iteritems(self.data)): if k in data: continue if k ==", "error.short_description = _(\"error\") @property def message_short(self): warnings.warn(\"Event.message_short is deprecated, use", "group(self, group): self.group_id = group.id self._group_cache = group @property def", "return super(SnubaEvent, self).title @property def culprit(self): if \"culprit\" in self.snuba_data:", "==================================================== # Snuba implementations of the django fields on Event", "# for a long time culprit was not persisted. In", "( BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr, ) from sentry.db.models.manager", "those cases put # the culprit in from the group.", "on Event # ==================================================== @property def datetime(self): \"\"\" Reconstruct the", "self).tags def get_minimal_user(self): from sentry.interfaces.user import User return User.to_python( {", "may as well only fetch the minimum from snuba to", "snuba, and the event body is fetched from nodestore and", "== key: return v return None @property def release(self): return", "User return User.to_python( { \"id\": self.user_id, \"email\": self.email, \"username\": self.username,", "the event with `SnubaEvent.minimal_colums` and let the rest of of", "def platform(self): if \"platform\" in self.snuba_data: return self.snuba_data[\"platform\"] return self.data.get(\"platform\")", "has been modified in place. 
\"\"\" from sentry.grouping.api import get_grouping_variants_for_event,", "set to `True` then the event data will be modified", "to use tzlocal() instead of UTC even though the string", "node_id = SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"] ) self.data = NodeData(None, node_id,", "in get_path(self.data, \"tags\", filter=True) or () if t is not", "self.platform data[\"message\"] = self.real_message data[\"datetime\"] = self.datetime data[\"time_spent\"] = self.time_spent", "or self.group.get_level_display() if self.group: return self.group.get_level_display() else: return None #", "elif name == \"title\": return self.event.title raise KeyError DEFAULT_SUBJECT_TEMPLATE =", "otherwise require nodestore data. # ============================================ @property def tags(self): \"\"\"", "v) for (k, v) in self.tags] for k, v in", "here. \"\"\" if name in (\"_project_cache\", \"_group_cache\", \"_environment_cache\"): raise AttributeError()", "from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config from sentry.stacktraces.processing import normalize_stacktraces_for_grouping #", "return self.event.organization.slug elif name == \"title\": return self.event.title raise KeyError", "is planning on loading the entire event body from #", "template = DEFAULT_SUBJECT_TEMPLATE return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode( \"utf-8\" ) def get_environment(self):", "# avoid duplicated work. minimal_columns = [\"event_id\", \"group_id\", \"project_id\", \"timestamp\"]", "return User.to_python( { \"id\": self.user_id, \"email\": self.email, \"username\": self.username, \"ip_address\":", "of this event. See ``sentry.eventtypes``. 
\"\"\" return self.data.get(\"type\", \"default\") def", "data[\"datetime\"] = self.datetime data[\"time_spent\"] = self.time_spent data[\"tags\"] = [(k.split(\"sentry:\", 1)[-1],", "Required to provide snuba-only tags \"tags.key\", \"tags.value\", # Required to", "time_spent = BoundedIntegerField(null=True) data = NodeField( blank=True, null=True, ref_func=lambda x:", "should be moving to a world where we never #", "if \"culprit\" in self.snuba_data: return self.snuba_data[\"culprit\"] return super(SnubaEvent, self).culprit @property", "vs ((tag, foo), (tag, bar)) return [] # For compatibility,", "get_interfaces from sentry.utils import json from sentry.utils.cache import memoize from", "normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self, data, **kwargs) class EventCommon(object): \"\"\" Methods and properties", "raise AttributeError() if name in self.snuba_data: return self.snuba_data[name] else: return", "can be retrieved using the same generated id when we", "not support event creation or save. The basic event data", "def organization(self): return self.project.organization @property def version(self): return self.data.get(\"version\", \"5\")", "\"\"\" Returns the calculated hashes for the event. This uses", "\"env\", \"REMOTE_ADDR\") if remote_addr: return remote_addr return None @property def", "self.get_hashes()[0] @property def title(self): # also see event_manager.py which inserts", "\"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\") or \"\" ) @property", "# Snuba-only implementations of properties that # would otherwise require", "internal raw event data dict.\"\"\" return dict(self.data.items()) @property def size(self):", "common to both Event and SnubaEvent. 
\"\"\" @classmethod def generate_node_id(cls,", "need the entire event body anyway (which requires a nodestore", "return self.event.group.qualified_short_id elif name == \"orgID\": return self.event.organization.slug elif name", "name == \"projectID\": return self.event.project.slug elif name == \"shortID\" and", "def as_dict(self): \"\"\"Returns the data in normalized form for external", "used as a wrapper type for `Event.data` such that creating", "assert all(k in snuba_values for k in SnubaEvent.minimal_columns) # self.snuba_data", "we got from nodestore node_id = SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"] )", "event backed by data stored in snuba. This is a", "from the group. if data.get(\"culprit\") is None and self.group_id: data[\"culprit\"]", "wrapper=EventDict, ) objects = EventManager() class Meta: app_label = \"sentry\"", "if you know you are going to need the entire", "import models from django.utils import timezone from django.utils.translation import ugettext_lazy", ") objects = EventManager() class Meta: app_label = \"sentry\" db_table", "EventManager() class Meta: app_label = \"sentry\" db_table = \"sentry_message\" verbose_name", "be protected from an IndexError? return self.get_hashes()[0] @property def title(self):", "None else: self.project_id = project.id self._project_cache = project def get_interfaces(self):", "def get_minimal_user(self): from sentry.interfaces.user import User return User.to_python( { \"id\":", "AttributeError() if name in self.snuba_data: return self.snuba_data[name] else: return self.data[name]", "we may have to look in the `data` dict (which", "once the message rename is through # plugins should instead", "as stored. 
if # this is None the `get_grouping_variants_for_event` will", "in the data we use them, otherwise we # fall", "def interfaces(self): return self.get_interfaces() def get_interface(self, name): return self.interfaces.get(name) def", "self.group: return self.group.get_level_display() else: return None # deprecated accessors @property", "logger(self): warnings.warn(\"Event.logger is deprecated. Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"logger\")", "the data in normalized form for external consumers.\"\"\" # We", "this key in nodestore so it can be retrieved using", "not know # about state.pop(\"_project_cache\", None) state.pop(\"_environment_cache\", None) state.pop(\"_group_cache\", None)", "and not is_renormalized: normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False) data = normalizer.normalize_event(dict(data))", "config = load_grouping_config(config) if normalize_stacktraces: normalize_stacktraces_for_grouping(self.data, config) return get_grouping_variants_for_event(self, config)", "group_id = BoundedBigIntegerField(blank=True, null=True) event_id = models.CharField(max_length=32, null=True, db_column=\"message_id\") project_id", "data.get(\"culprit\") is None and self.group_id: data[\"culprit\"] = self.group.culprit # Override", "in snuba. This is a readonly event and does not", "will become `message`. return ( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data,", "TODO why is this not a property? warnings.warn(\"Event.error is deprecated,", "same way as a regular Event. 
\"\"\" # The minimal", "from # nodestore anyway, we may as well only fetch", "project def get_interfaces(self): return CanonicalKeyView(get_interfaces(self.data)) @memoize def interfaces(self): return self.get_interfaces()", "wrapper=EventDict) def __getattr__(self, name): \"\"\" Depending on what snuba data", "# self.data is a (lazy) dict of everything we got", "else: self.project_id = project.id self._project_cache = project def get_interfaces(self): return", "will fill in # the default. else: config = self.data.get(\"grouping_config\")", "method could return what currently is real_message. return ( get_path(self.data,", "t is not None and v is not None ]", "from sentry.utils.cache import memoize from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView from", "self.location return data # ============================================ # DEPRECATED # ============================================ @property", "even though the string # ends with '+00:00', so just", "( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\") or \"\"", "should instead swithc to the actual message attribute or #", "the default. else: config = self.data.get(\"grouping_config\") config = load_grouping_config(config) if", "sentry.db.models import ( BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr, )", "def generate_node_id(cls, project_id, event_id): \"\"\" Returns a deterministic node_id for", "that # would otherwise require nodestore data. 
# ============================================ @property", "get_email_subject(self): template = self.project.get_option(\"mail:subject_template\") if template: template = EventSubjectTemplate(template) else:", "self.event_id def save(self): raise NotImplementedError class Event(EventCommon, Model): \"\"\" An", "let the rest of of the attributes come from nodestore.", "self.get_tag(\"site\") @property def server_name(self): warnings.warn( \"Event.server_name is deprecated. Use Event.tags", "from semaphore.processing import StoreNormalizer from sentry import eventtypes from sentry.db.models", "generated data data[\"title\"] = self.title data[\"location\"] = self.location return data", "= self.project.get_option(\"mail:subject_template\") if template: template = EventSubjectTemplate(template) else: template =", "self).location # ==================================================== # Snuba implementations of the django fields", "to EventCommon implementation. def get_event_type(self): if \"type\" in self.snuba_data: return", "It's still # being used by plugin code and once", "bar) # vs ((tag, foo), (tag, bar)) return [] #", "\"sentry:user\"} def __init__(self, event): self.event = event def __getitem__(self, name):", "hobble along # further. return self.data.get(\"metadata\") or {} def get_grouping_config(self):", "in self.snuba_data: return self.snuba_data[\"title\"] return super(SnubaEvent, self).title @property def culprit(self):", "return None @property def tags(self): try: rv = sorted( [", "form for external consumers.\"\"\" # We use a OrderedDict to", "getattr(self, \"tags.key\") values = getattr(self, \"tags.value\") if keys and values", "def checksum(self): warnings.warn(\"Event.checksum is no longer used\", DeprecationWarning) return \"\"", "def get_interface(self, name): return self.interfaces.get(name) def get_legacy_message(self): # TODO(mitsuhiko): remove", "if available. 
Grouping hashes will take into account fingerprinting and", "# this method could return what currently is real_message. return", "to cache these properties. functools32 # doesn't quite do the", "world where we never # have to reference the row", "stored. if # this is None the `get_grouping_variants_for_event` will fill", "self.snuba_data: keys = getattr(self, \"tags.key\") values = getattr(self, \"tags.value\") if", "reference the row id anyway. return self.event_id def save(self): raise", "def project(self): from sentry.models import Project if not hasattr(self, \"_project_cache\"):", "# plugins should instead swithc to the actual message attribute", "(foo, bar) # vs ((tag, foo), (tag, bar)) return []", "is used as a wrapper type for `Event.data` such that", "if force_config is not None: if isinstance(force_config, six.string_types): stored_config =", "from sentry.utils import json from sentry.utils.cache import memoize from sentry.utils.canonical", "import get_interfaces from sentry.utils import json from sentry.utils.cache import memoize", "\"\"\" This is similar to `get_hashes` but will instead return", "sane_repr(\"project_id\", \"group_id\") def __getstate__(self): state = Model.__getstate__(self) # do not", "from an IndexError? 
return self.get_hashes()[0] @property def title(self): # also", "else: return None # deprecated accessors @property def logger(self): warnings.warn(\"Event.logger", "EventManager from sentry.interfaces.base import get_interfaces from sentry.utils import json from", "a SnubaEvent, think about the attributes you might need to", "ip_address(self): ip_address = get_path(self.data, \"user\", \"ip_address\") if ip_address: return ip_address", "key in nodestore so it can be retrieved using the", "timezone from django.utils.translation import ugettext_lazy as _ from hashlib import", "= models.TextField() platform = models.CharField(max_length=64, null=True) datetime = models.DateTimeField(default=timezone.now, db_index=True)", "data dict.\"\"\" return dict(self.data.items()) @property def size(self): return len(json.dumps(dict(self.data))) @property", "= [(k.split(\"sentry:\", 1)[-1], v) for (k, v) in self.tags] for", "((\"project_id\", \"event_id\"),) index_together = ((\"group_id\", \"datetime\"),) __repr__ = sane_repr(\"project_id\", \"group_id\")", "\"event_id\"),) index_together = ((\"group_id\", \"datetime\"),) __repr__ = sane_repr(\"project_id\", \"group_id\") def", "was already normalized on the way in and we can", "not self.group_id: return None if not hasattr(self, \"_group_cache\"): self._group_cache =", "Meta: app_label = \"sentry\" db_table = \"sentry_message\" verbose_name = _(\"message\")", "see event_manager.py which inserts this for snuba et = eventtypes.get(self.get_event_type())()", "some cases where the data # is completely empty. In", "DeprecationWarning) return self.get_tag(\"site\") @property def server_name(self): warnings.warn( \"Event.server_name is deprecated.", "be useful for implementing tag deletions without having to rewrite", "pickle cached info. 
We want to fetch this on demand", "to the data property in the same way as a", "ip_address(self): if \"ip_address\" in self.snuba_data: return self.snuba_data[\"ip_address\"] return super(SnubaEvent, self).ip_address", "config[\"id\"] = force_config else: config = force_config # Otherwise we", "if t is not None and v is not None", "get from snuba. selected_columns = minimal_columns + [ \"culprit\", \"location\",", "group): self.group_id = group.id self._group_cache = group @property def project(self):", "the data. We can only use # this if we", "models from django.utils import timezone from django.utils.translation import ugettext_lazy as", "of of the attributes come from nodestore. \"\"\" assert all(k", "import parse as parse_date from django.db import models from django.utils", "= self.group.culprit # Override title and location with dynamically generated", "self.username, \"ip_address\": self.ip_address, } ) # If the data for", "default config dictionary if force_config is not None: if isinstance(force_config,", "culprit(self): # For a while events did not save the", "from sentry.models import Group if not self.group_id: return None if", "\"\"\" Return the type of this event. See ``sentry.eventtypes``. \"\"\"", "import timezone from django.utils.translation import ugettext_lazy as _ from hashlib", "Group.objects.get(id=self.group_id) return self._group_cache @group.setter def group(self, group): self.group_id = group.id", "the TZ with UTC because we know # all timestamps", "TZ with UTC because we know # all timestamps from", "account fingerprinting and checksums. \"\"\" # If we have hashes", "serializer data = OrderedDict() data[\"event_id\"] = self.event_id data[\"project\"] = self.project_id", "All unresolved self.foo type accesses will come through here. 
\"\"\"", "else: config = force_config # Otherwise we just use the", "for this event based on the project_id and event_id which", "v return None @property def release(self): return self.get_tag(\"sentry:release\") @property def", "If the data for these is available from snuba, we", "or {} def get_grouping_config(self): \"\"\"Returns the event grouping config.\"\"\" from", "we might want to move to this: # return self.get_tag('level')", "self.data is a (lazy) dict of everything we got from", "\"orgID\": return self.event.organization.slug elif name == \"title\": return self.event.title raise", "used by plugins. def get_tags(self): return self.tags def get_tag(self, key):", "object that gives us enough information to render a user", "self._project_cache @project.setter def project(self, project): if project is None: self.project_id", "state.pop(\"_environment_cache\", None) state.pop(\"_group_cache\", None) state.pop(\"interfaces\", None) return state class EventSubjectTemplate(string.Template):", "\"location\", \"message\", \"platform\", \"title\", \"type\", # Required to provide snuba-only", "We use a OrderedDict to keep elements ordered for a", "# the default. else: config = self.data.get(\"grouping_config\") config = load_grouping_config(config)", "Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"site\") @property def server_name(self): warnings.warn(", "stuff we got from snuba self.snuba_data = snuba_values # self.data", "tags(self): \"\"\" Override of tags property that uses tags from", "is set to `True` then the event data will be", "to get from snuba to bootstrap an # event. If", "\"\"\" Depending on what snuba data this event was initialized", "@property def message_short(self): warnings.warn(\"Event.message_short is deprecated, use Event.title\", DeprecationWarning) return", "access on it. If you only need a few properties,", "self.get_tag(\"logger\") @property def site(self): warnings.warn(\"Event.site is deprecated. 
Use Event.tags instead.\",", "sentry import eventtypes from sentry.db.models import ( BoundedBigIntegerField, BoundedIntegerField, Model,", "None: return hashes return filter( None, [x.get_hash() for x in", "= minimal_columns + [ \"culprit\", \"location\", \"message\", \"platform\", \"title\", \"type\",", "Event # ==================================================== @property def datetime(self): \"\"\" Reconstruct the datetime", "of properties that # would otherwise require nodestore data. #", "invalid tag sets such as (foo, bar) # vs ((tag,", "through here. \"\"\" if name in (\"_project_cache\", \"_group_cache\", \"_environment_cache\"): raise", "replace the TZ with UTC because we know # all", "in self.snuba_data: return self.snuba_data[\"message\"] return self.data.get(\"message\") @property def platform(self): if", "is no longer used\", DeprecationWarning) return \"\" def error(self): #", "= get_path(self.data, \"user\", \"ip_address\") if ip_address: return ip_address remote_addr =", "NodeData) and isinstance(data.data, EventDict) ) if not skip_renormalization and not", "\"tags.key\") values = getattr(self, \"tags.value\") if keys and values and", "return self.snuba_data[\"location\"] return super(SnubaEvent, self).location # ==================================================== # Snuba implementations", "self.tags] for k, v in sorted(six.iteritems(self.data)): if k in data:", "\"type\", # Required to provide snuba-only tags \"tags.key\", \"tags.value\", #", "else: return [] else: return super(SnubaEvent, self).tags def get_minimal_user(self): from", "uses the stored information if available. Grouping hashes will take", "a dict of all the stuff we got from snuba", "implementing tag deletions without having to rewrite nodestore blobs. 
\"\"\"", "Creating an instance of this dictionary will send the event", "= snuba_values # self.data is a (lazy) dict of everything", "warnings.warn(\"Event.checksum is no longer used\", DeprecationWarning) return \"\" def error(self):", "snuba are UTC. return parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property def time_spent(self): return None", "We want to fetch this on demand # again. In", "were to pickle interfaces we would # pickle a CanonicalKeyView", "return \"\" def error(self): # TODO why is this not", "name): \"\"\" Depending on what snuba data this event was", "return self.snuba_data[name] else: return self.data[name] # ============================================ # Snuba-only implementations", "self.group.level if self.group: return self.group.level else: return None def get_level_display(self):", "v is not None ] ) return rv except ValueError:", "data[\"culprit\"] = self.group.culprit # Override title and location with dynamically", "= self.location return data # ============================================ # DEPRECATED # ============================================", "= getattr(self, \"tags.value\") if keys and values and len(keys) ==", "import EventManager from sentry.interfaces.base import get_interfaces from sentry.utils import json", "from sentry.models import Environment if not hasattr(self, \"_environment_cache\"): self._environment_cache =", "But if you know you are going to need the", "is fetched from nodestore and bound to the data property", "(tag, bar)) return [] # For compatibility, still used by", "entire event body anyway (which requires a nodestore lookup) you", "__init__(self, event): self.event = event def __getitem__(self, name): if name.startswith(\"tag:\"):", "\"sdk\": v = {v_k: v_v for v_k, v_v in six.iteritems(v)", "event body should be saved under this key in nodestore", "checksums. 
\"\"\" # If we have hashes stored in the", "ip_address = get_path(self.data, \"user\", \"ip_address\") if ip_address: return ip_address remote_addr", "and v is not None ] ) return rv except", "get_environment(self): from sentry.models import Environment if not hasattr(self, \"_environment_cache\"): self._environment_cache", "et.get_location(self.get_event_metadata()) @property def real_message(self): # XXX(mitsuhiko): this is a transitional", "is given in which case it's merged with the stored", "def __getstate__(self): state = Model.__getstate__(self) # do not pickle cached", "from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView from sentry.utils.safe import get_path from", "self.get_grouping_variants(force_config).values()] ) def get_grouping_variants(self, force_config=None, normalize_stacktraces=False): \"\"\" This is similar", "renamed to `search_message` and this # will become `message`. return", "for `Event.data` such that creating an event object (or loading", "get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data, \"logentry\", \"message\") or \"\" )", "data # is completely empty. In that case we want", "@property def group(self): from sentry.models import Group if not self.group_id:", "return self.get_hashes()[0] @property def title(self): # also see event_manager.py which", "properties, and they are all available in snuba, then you", "deprecated. Use Event.tags instead.\", DeprecationWarning ) return self.get_tag(\"server_name\") @property def", "snuba. selected_columns = minimal_columns + [ \"culprit\", \"location\", \"message\", \"platform\",", "data[\"message\"] = self.real_message data[\"datetime\"] = self.datetime data[\"time_spent\"] = self.time_spent data[\"tags\"]", "v in self.get_tags(): if t == key: return v return", "created. 
This means that after calling that function the event", "uses tags from snuba rather than the nodestore event body.", "\"request\", \"env\", \"REMOTE_ADDR\") if remote_addr: return remote_addr return None @property", "def title(self): if \"title\" in self.snuba_data: return self.snuba_data[\"title\"] return super(SnubaEvent,", "This might be useful for implementing tag deletions without having", "= sorted( [ (t, v) for t, v in get_path(self.data,", "def get_tag(self, key): for t, v in self.get_tags(): if t", "get_tags(self): return self.tags def get_tag(self, key): for t, v in", "moving to a world where we never # have to", "initialized with, we may have the data available to return,", "def logger(self): warnings.warn(\"Event.logger is deprecated. Use Event.tags instead.\", DeprecationWarning) return", "what currently is real_message. return ( get_path(self.data, \"logentry\", \"formatted\") or", "which inserts this for snuba et = eventtypes.get(self.get_event_type())() return et.get_location(self.get_event_metadata())", "django row id, just return # the hex event_id here.", "sentry.interfaces.base import get_interfaces from sentry.utils import json from sentry.utils.cache import", "of UTC even though the string # ends with '+00:00',", "[ (t, v) for t, v in get_path(self.data, \"tags\", filter=True)", "need to be protected from an IndexError? 
return self.get_hashes()[0] @property", "return hashes return filter( None, [x.get_hash() for x in self.get_grouping_variants(force_config).values()]", "is fetched from snuba, and the event body is fetched", "name == \"shortID\" and self.event.group_id: return self.event.group.qualified_short_id elif name ==", "@property def tags(self): try: rv = sorted( [ (t, v)", "config = self.data.get(\"grouping_config\") config = load_grouping_config(config) if normalize_stacktraces: normalize_stacktraces_for_grouping(self.data, config)", "t == key: return v return None @property def release(self):", "to `search_message` and this # will become `message`. return (", "hasattr(self, \"_project_cache\"): self._project_cache = Project.objects.get(id=self.project_id) return self._project_cache @project.setter def project(self,", "return self.get_tag(\"sentry:dist\") def get_raw_data(self): \"\"\"Returns the internal raw event data", "get from snuba to bootstrap an # event. If the", "as _ from hashlib import md5 from semaphore.processing import StoreNormalizer", "unused. It's still # being used by plugin code and", "would force a nodestore load). All unresolved self.foo type accesses", "is available from snuba, we assume # it was already", "fill in # the default. 
else: config = self.data.get(\"grouping_config\") config", "generate_node_id(cls, project_id, event_id): \"\"\" Returns a deterministic node_id for this", "return self.data[name] # ============================================ # Snuba-only implementations of properties that", "Environment if not hasattr(self, \"_environment_cache\"): self._environment_cache = Environment.objects.get( organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")),", ") return self._environment_cache def get_minimal_user(self): \"\"\" A minimal 'User' interface", "self.real_message data[\"datetime\"] = self.datetime data[\"time_spent\"] = self.time_spent data[\"tags\"] = [(k.split(\"sentry:\",", "have a django row id, just return # the hex", "work. minimal_columns = [\"event_id\", \"group_id\", \"project_id\", \"timestamp\"] # A list", "of this event. See ``sentry.eventtypes``. \"\"\" # For some inexplicable", "message attribute or # this method could return what currently", "\"\"\" from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config from sentry.stacktraces.processing import normalize_stacktraces_for_grouping", "for `in_app` in addition to event variants being created. This", "hasattr(self, \"_group_cache\"): self._group_cache = Group.objects.get(id=self.group_id) return self._group_cache @group.setter def group(self,", "in postgres. 
\"\"\" __core__ = False group_id = BoundedBigIntegerField(blank=True, null=True)", "x in self.get_grouping_variants(force_config).values()] ) def get_grouping_variants(self, force_config=None, normalize_stacktraces=False): \"\"\" This", "data=None, wrapper=EventDict) def __getattr__(self, name): \"\"\" Depending on what snuba", "title and location with dynamically generated data data[\"title\"] = self.title", "name.startswith(\"tag:\"): name = name[4:] value = self.event.get_tag(self.tag_aliases.get(name, name)) if value", "self.group.level else: return None def get_level_display(self): # we might want", "for a long time culprit was not persisted. In those", "the grouping components for each variant in a dictionary. If", "= sane_repr(\"project_id\", \"group_id\") def __init__(self, snuba_values): \"\"\" When initializing a", "from nodestore. \"\"\" assert all(k in snuba_values for k in", "doesn't quite do the trick as there is a reference", "= Project.objects.get(id=self.project_id) return self._project_cache @project.setter def project(self, project): if project", "BoundedIntegerField(null=True) data = NodeField( blank=True, null=True, ref_func=lambda x: x.project_id or", "modes. One is where just the # config ID is", "# XXX(mitsuhiko): this is a transitional attribute that should be", "instead.\", DeprecationWarning) return self.get_tag(\"logger\") @property def site(self): warnings.warn(\"Event.site is deprecated.", "to event variants being created. This means that after calling", "\"tags.value\", # Required to provide snuba-only 'user' interface \"email\", \"ip_address\",", "event with `SnubaEvent.minimal_colums` and let the rest of of the", "information to render a user badge. \"\"\" return self.get_interface(\"user\") def", "[(k.split(\"sentry:\", 1)[-1], v) for (k, v) in self.tags] for k,", "self.group_id: data[\"culprit\"] = self.group.culprit # Override title and location with", "accesses will come through here. 
\"\"\" if name in (\"_project_cache\",", "to provide snuba-only tags \"tags.key\", \"tags.value\", # Required to provide", "may as well just initialize the event with `SnubaEvent.minimal_colums` and", "event. This uses the stored information if available. Grouping hashes", "where just the # config ID is given in which", "self.data.get(\"type\", \"default\") def get_event_metadata(self): \"\"\" Return the metadata of this", "If we have hashes stored in the data we use", "# at one point Sentry allowed invalid tag sets such", "return self.tags def get_tag(self, key): for t, v in self.get_tags():", "we have some cases where the data # is completely", "\"\"\" return self.get_interface(\"user\") def as_dict(self): \"\"\"Returns the data in normalized", "tag_aliases = {\"release\": \"sentry:release\", \"dist\": \"sentry:dist\", \"user\": \"sentry:user\"} def __init__(self,", "that uses tags from snuba rather than the nodestore event", "warnings.warn(\"Event.error is deprecated, use Event.title\", DeprecationWarning) return self.title error.short_description =", "for t, v in get_path(self.data, \"tags\", filter=True) or () if", "self._environment_cache def get_minimal_user(self): \"\"\" A minimal 'User' interface object that", "==================================================== @property def datetime(self): \"\"\" Reconstruct the datetime of this", "minimal_columns = [\"event_id\", \"group_id\", \"project_id\", \"timestamp\"] # A list of", "data is fetched from snuba, and the event body is", "the DB) will ensure the data fits the type schema.", "this: # return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level if self.group: return self.group.level", "= event def __getitem__(self, name): if name.startswith(\"tag:\"): name = name[4:]", "If `normalize_stacktraces` is set to `True` then the event data", "from hashlib import md5 from semaphore.processing import StoreNormalizer from sentry", "property in the same way as a regular Event. 
\"\"\"", "BoundedIntegerField, Model, NodeData, NodeField, sane_repr, ) from sentry.db.models.manager import EventManager", "null=True, ref_func=lambda x: x.project_id or x.project.id, ref_version=2, wrapper=EventDict, ) objects", "get_event_type(self): \"\"\" Return the type of this event. See ``sentry.eventtypes``.", "self.event.get_tag(self.tag_aliases.get(name, name)) if value is None: raise KeyError return six.text_type(value)", "dict (which would force a nodestore load). All unresolved self.foo", "config) def get_primary_hash(self): # TODO: This *might* need to be", "# this if we do not force a different config.", "EventCommon implementation. def get_event_type(self): if \"type\" in self.snuba_data: return self.snuba_data[\"type\"]", "data[\"event_id\"] = self.event_id data[\"project\"] = self.project_id data[\"release\"] = self.release data[\"dist\"]", "None # deprecated accessors @property def logger(self): warnings.warn(\"Event.logger is deprecated.", "\"\"\" if \"tags.key\" in self.snuba_data and \"tags.value\" in self.snuba_data: keys", "EventDict(CanonicalKeyDict): \"\"\" Creating an instance of this dictionary will send", "in snuba, then you should use `SnubaEvent.selected_colums` (or a subset", "you know you are going to need the entire event", "might be useful for implementing tag deletions without having to", "after calling that function the event data has been modified", "\"\"\" # If we have hashes stored in the data", "blobs. 
\"\"\" if \"tags.key\" in self.snuba_data and \"tags.value\" in self.snuba_data:", "everything we got from nodestore node_id = SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"], self.snuba_data[\"event_id\"]", "\"\"\" assert all(k in snuba_values for k in SnubaEvent.minimal_columns) #", "dynamically generated data data[\"title\"] = self.title data[\"location\"] = self.location return", "to a world where we never # have to reference", "if isinstance(force_config, six.string_types): stored_config = self.get_grouping_config() config = dict(stored_config) config[\"id\"]", "# self.snuba_data is a dict of all the stuff we", "== \"project\": return self.event.project.get_full_name() elif name == \"projectID\": return self.event.project.slug", "why is this not a property? warnings.warn(\"Event.error is deprecated, use", "release(self): return self.get_tag(\"sentry:release\") @property def dist(self): return self.get_tag(\"sentry:dist\") def get_raw_data(self):", "@property def organization(self): return self.project.organization @property def version(self): return self.data.get(\"version\",", "six.text_type(value) elif name == \"project\": return self.event.project.get_full_name() elif name ==", "\"shortID\" and self.event.group_id: return self.event.group.qualified_short_id elif name == \"orgID\": return", "for t, v in self.get_tags(): if t == key: return", "import User return User.to_python( { \"id\": self.user_id, \"email\": self.email, \"username\":", "# vs ((tag, foo), (tag, bar)) return [] # For", "data this event was initialized with, we may have the", "remove this code once it's unused. 
It's still # being", "EventDict) ) if not skip_renormalization and not is_renormalized: normalizer =", "these # properties need to be stripped out in __getstate__.", "few properties, and they are all available in snuba, then", "self.time_spent data[\"tags\"] = [(k.split(\"sentry:\", 1)[-1], v) for (k, v) in", "= project def get_interfaces(self): return CanonicalKeyView(get_interfaces(self.data)) @memoize def interfaces(self): return", "models.CharField(max_length=64, null=True) datetime = models.DateTimeField(default=timezone.now, db_index=True) time_spent = BoundedIntegerField(null=True) data", "\"message\") or self.message ) def get_event_type(self): \"\"\" Return the type", "def get_tags(self): return self.tags def get_tag(self, key): for t, v", "save. The basic event data is fetched from snuba, and", "just initialize the event with `SnubaEvent.minimal_colums` and let the rest", "thing is also clunky because these # properties need to", "nodestore data. # ============================================ @property def tags(self): \"\"\" Override of", "self.event.group_id: return self.event.group.qualified_short_id elif name == \"orgID\": return self.event.organization.slug elif", "import string import warnings import pytz from collections import OrderedDict", "'User' interface object that gives us enough information to render", "Event(EventCommon, Model): \"\"\" An event backed by data stored in", "# We use a OrderedDict to keep elements ordered for", "by plugins. def get_tags(self): return self.tags def get_tag(self, key): for", "= ((\"project_id\", \"event_id\"),) index_together = ((\"group_id\", \"datetime\"),) __repr__ = sane_repr(\"project_id\",", "we do not force a different config. if force_config is", "still used by plugins. 
def get_tags(self): return self.tags def get_tag(self,", "about state.pop(\"_project_cache\", None) state.pop(\"_environment_cache\", None) state.pop(\"_group_cache\", None) state.pop(\"interfaces\", None) return", "going to need the entire event body anyway (which requires", "from sentry.utils.safe import get_path from sentry.utils.strings import truncatechars class EventDict(CanonicalKeyDict):", "of everything we got from nodestore node_id = SnubaEvent.generate_node_id( self.snuba_data[\"project_id\"],", "info. We want to fetch this on demand # again.", "planning on loading the entire event body from # nodestore", "return data # ============================================ # DEPRECATED # ============================================ @property def", "\"tags.value\" in self.snuba_data: keys = getattr(self, \"tags.key\") values = getattr(self,", "we would # pickle a CanonicalKeyView which old sentry workers", "def real_message(self): # XXX(mitsuhiko): this is a transitional attribute that", "is deprecated. Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"site\") @property def", "unresolved self.foo type accesses will come through here. \"\"\" if", "sentry.utils.cache import memoize from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView from sentry.utils.safe", "get_tag(self, key): for t, v in self.get_tags(): if t ==", "creating an event object (or loading it from the DB)", "variants being created. 
This means that after calling that function", "snuba timestamp \"\"\" # dateutil seems to use tzlocal() instead", "sane_repr, ) from sentry.db.models.manager import EventManager from sentry.interfaces.base import get_interfaces", "they are all available in snuba, then you should use", "if not hasattr(self, \"_project_cache\"): self._project_cache = Project.objects.get(id=self.project_id) return self._project_cache @project.setter", "we assume # it was already normalized on the way", "= force_config else: config = force_config # Otherwise we just", "absolute_import import six import string import warnings import pytz from", "as well only fetch the minimum from snuba to #", "fall back to generating new ones from the data. We", "in self.snuba_data: return self.snuba_data[\"platform\"] return self.data.get(\"platform\") @property def id(self): #", "ugettext_lazy as _ from hashlib import md5 from semaphore.processing import", "null=True, db_column=\"message_id\") project_id = BoundedBigIntegerField(blank=True, null=True) message = models.TextField() platform", "stored in snuba. This is a readonly event and does", "name[4:] value = self.event.get_tag(self.tag_aliases.get(name, name)) if value is None: raise", "@property def level(self): # we might want to move to", "# DEPRECATED # ============================================ @property def level(self): # we might", "or # this method could return what currently is real_message.", "if force_config is None: hashes = self.data.get(\"hashes\") if hashes is", "move to this: # return self.get_tag('level') or self.group.get_level_display() if self.group:", "the stored information if available. 
Grouping hashes will take into", "initializing a SnubaEvent, think about the attributes you might need", "particular if we were to pickle interfaces we would #", "\"sentry:dist\", \"user\": \"sentry:user\"} def __init__(self, event): self.event = event def", "= self.platform data[\"message\"] = self.real_message data[\"datetime\"] = self.datetime data[\"time_spent\"] =", "it from the DB) will ensure the data fits the", "interfaces we would # pickle a CanonicalKeyView which old sentry", "sentry workers do not know # about state.pop(\"_project_cache\", None) state.pop(\"_environment_cache\",", "load). All unresolved self.foo type accesses will come through here.", "or get_path(self.data, \"logentry\", \"message\") or self.message ) def get_event_type(self): \"\"\"", "\"ip_address\": self.ip_address, } ) # If the data for these", "\"utf-8\" ) def get_environment(self): from sentry.models import Environment if not", "dict(stored_config) config[\"id\"] = force_config else: config = force_config # Otherwise", "available in snuba, then you should use `SnubaEvent.selected_colums` (or a", "the same grouping config as stored. if # this is", "if \"type\" in self.snuba_data: return self.snuba_data[\"type\"] return super(SnubaEvent, self).get_event_type() @property", "import get_path from sentry.utils.strings import truncatechars class EventDict(CanonicalKeyDict): \"\"\" Creating", "data fits the type schema. \"\"\" def __init__(self, data, skip_renormalization=False,", "metadata of this event. See ``sentry.eventtypes``. \"\"\" # For some", "from snuba. selected_columns = minimal_columns + [ \"culprit\", \"location\", \"message\",", "six.iteritems(v) if v_k != \"client_ip\"} data[k] = v # for", "bound to the data property in the same way as", "in data: continue if k == \"sdk\": v = {v_k:", "\"message\", \"platform\", \"title\", \"type\", # Required to provide snuba-only tags", "just return # the hex event_id here. 
We should be", "timestamp \"\"\" # dateutil seems to use tzlocal() instead of", "loading it from the DB) will ensure the data fits", "\"user\": \"sentry:user\"} def __init__(self, event): self.event = event def __getitem__(self,", "to look in the `data` dict (which would force a", "None: raise KeyError return six.text_type(value) elif name == \"project\": return", "force_config=None, normalize_stacktraces=False): \"\"\" This is similar to `get_hashes` but will", "return self.snuba_data[\"message\"] return self.data.get(\"message\") @property def platform(self): if \"platform\" in", "columns we can get from snuba. selected_columns = minimal_columns +", "\"culprit\" in self.snuba_data: return self.snuba_data[\"culprit\"] return super(SnubaEvent, self).culprit @property def", "dictionary will send the event through basic (Rust-based) type/schema validation", "an # event. If the client is planning on loading", "DeprecationWarning) return \"\" def error(self): # TODO why is this", "a reference bug with unsaved # models. 
But the current", "return self.snuba_data[\"title\"] return super(SnubaEvent, self).title @property def culprit(self): if \"culprit\"", "have some cases where the data # is completely empty.", "return v return None @property def release(self): return self.get_tag(\"sentry:release\") @property", "is not None ] ) return rv except ValueError: #", "use tzlocal() instead of UTC even though the string #", "import memoize from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView from sentry.utils.safe import", "sentry.models import Group if not self.group_id: return None if not", "plugins should instead swithc to the actual message attribute or", "for (k, v) in self.tags] for k, v in sorted(six.iteritems(self.data)):", "This is similar to `get_hashes` but will instead return the", "instead return the grouping components for each variant in a", "values = getattr(self, \"tags.value\") if keys and values and len(keys)", "for implementing tag deletions without having to rewrite nodestore blobs.", "return super(SnubaEvent, self).tags def get_minimal_user(self): from sentry.interfaces.user import User return", "the data fits the type schema. \"\"\" def __init__(self, data,", "from the DB) will ensure the data fits the type", "or # default config dictionary if force_config is not None:", "the group. if data.get(\"culprit\") is None and self.group_id: data[\"culprit\"] =", "modified for `in_app` in addition to event variants being created.", "@property def culprit(self): # For a while events did not", "use the same grouping config as stored. if # this", "hashes for the event. 
This uses the stored information if", "None: hashes = self.data.get(\"hashes\") if hashes is not None: return", "EventDict) or ( isinstance(data, NodeData) and isinstance(data.data, EventDict) ) if", "self.data.get(\"metadata\") or {} def get_grouping_config(self): \"\"\"Returns the event grouping config.\"\"\"", "ends with '+00:00', so just replace the TZ with UTC", "semaphore.processing import StoreNormalizer from sentry import eventtypes from sentry.db.models import", "get_grouping_config_dict_for_event_data(self.data, self.project) def get_hashes(self, force_config=None): \"\"\" Returns the calculated hashes", "as (foo, bar) # vs ((tag, foo), (tag, bar)) return", "`in_app` in addition to event variants being created. This means", "workers do not know # about state.pop(\"_project_cache\", None) state.pop(\"_environment_cache\", None)", "dictionary if force_config is not None: if isinstance(force_config, six.string_types): stored_config", "\"_group_cache\", \"_environment_cache\"): raise AttributeError() if name in self.snuba_data: return self.snuba_data[name]", "snuba event will never have a django row id, just", "\"title\": return self.event.title raise KeyError DEFAULT_SUBJECT_TEMPLATE = EventSubjectTemplate(\"$shortID - $title\")", "IndexError? return self.get_hashes()[0] @property def title(self): # also see event_manager.py", "event def __getitem__(self, name): if name.startswith(\"tag:\"): name = name[4:] value", "= DEFAULT_SUBJECT_TEMPLATE return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode( \"utf-8\" ) def get_environment(self): from", "message rename is through # plugins should instead swithc to", "this event. See ``sentry.eventtypes``. \"\"\" return self.data.get(\"type\", \"default\") def get_event_metadata(self):", "and does not support event creation or save. 
The basic", "models.TextField() platform = models.CharField(max_length=64, null=True) datetime = models.DateTimeField(default=timezone.now, db_index=True) time_spent", "in self.snuba_data: return self.snuba_data[\"culprit\"] return super(SnubaEvent, self).culprit @property def location(self):", "tzlocal() instead of UTC even though the string # ends", "self.snuba_data[\"location\"] return super(SnubaEvent, self).location # ==================================================== # Snuba implementations of", "return self.snuba_data[\"platform\"] return self.data.get(\"platform\") @property def id(self): # Because a", "DeprecationWarning) return self.title error.short_description = _(\"error\") @property def message_short(self): warnings.warn(\"Event.message_short", "def get_interfaces(self): return CanonicalKeyView(get_interfaces(self.data)) @memoize def interfaces(self): return self.get_interfaces() def", "= EventSubjectTemplate(template) else: template = DEFAULT_SUBJECT_TEMPLATE return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode( \"utf-8\"", "= EventManager() class Meta: app_label = \"sentry\" db_table = \"sentry_message\"", ") return self.get_tag(\"server_name\") @property def checksum(self): warnings.warn(\"Event.checksum is no longer", "as a wrapper type for `Event.data` such that creating an", "BoundedBigIntegerField(blank=True, null=True) message = models.TextField() platform = models.CharField(max_length=64, null=True) datetime", "datetime = models.DateTimeField(default=timezone.now, db_index=True) time_spent = BoundedIntegerField(null=True) data = NodeField(", "data = NodeField( blank=True, null=True, ref_func=lambda x: x.project_id or x.project.id,", "\"5\") @property def ip_address(self): ip_address = get_path(self.data, \"user\", \"ip_address\") if", "us enough information to render a user badge. 
\"\"\" return", "NodeField( blank=True, null=True, ref_func=lambda x: x.project_id or x.project.id, ref_version=2, wrapper=EventDict,", "a regular Event. \"\"\" # The minimal list of columns", "return len(json.dumps(dict(self.data))) @property def transaction(self): return self.get_tag(\"transaction\") def get_email_subject(self): template", "the data # is completely empty. In that case we", "x: x.project_id or x.project.id, ref_version=2, wrapper=EventDict, ) objects = EventManager()", "for k, v in sorted(six.iteritems(self.data)): if k in data: continue", "= getattr(self, \"tags.key\") values = getattr(self, \"tags.value\") if keys and", "attribute that should be # removed. `message` will be renamed", "stripped out in __getstate__. @property def group(self): from sentry.models import", "will be modified for `in_app` in addition to event variants", "from django.utils.translation import ugettext_lazy as _ from hashlib import md5", "def __getitem__(self, name): if name.startswith(\"tag:\"): name = name[4:] value =", "deprecated, use Event.title\", DeprecationWarning) return self.title class SnubaEvent(EventCommon): \"\"\" An", "self.ip_address, } ) # If the data for these is", "event_id which together are globally unique. The event body should", "in nodestore so it can be retrieved using the same", "v # for a long time culprit was not persisted.", "basic (Rust-based) type/schema validation called \"re-normalization\". This is used as", "data will be modified for `in_app` in addition to event", "index_together = ((\"group_id\", \"datetime\"),) __repr__ = sane_repr(\"project_id\", \"group_id\") def __getstate__(self):", "normalize_stacktraces=False): \"\"\" This is similar to `get_hashes` but will instead", "can get from snuba. 
selected_columns = minimal_columns + [ \"culprit\",", "django.db import models from django.utils import timezone from django.utils.translation import", "LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level if self.group: return self.group.level else: return None", "warnings import pytz from collections import OrderedDict from dateutil.parser import", "swithc to the actual message attribute or # this method", "if v_k != \"client_ip\"} data[k] = v # for a", "available to return, or we may have to look in", "got from snuba self.snuba_data = snuba_values # self.data is a", "# return self.get_tag('level') or self.group.get_level_display() if self.group: return self.group.get_level_display() else:", "\"project_id\", \"timestamp\"] # A list of all useful columns we", "= v # for a long time culprit was not", "sane_repr(\"project_id\", \"group_id\") def __init__(self, snuba_values): \"\"\" When initializing a SnubaEvent,", "skip_renormalization and not is_renormalized: normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False) data =", "elif name == \"orgID\": return self.event.organization.slug elif name == \"title\":", "being created. This means that after calling that function the", "to be protected from an IndexError? 
return self.get_hashes()[0] @property def", "data[\"platform\"] = self.platform data[\"message\"] = self.real_message data[\"datetime\"] = self.datetime data[\"time_spent\"]", "from sentry.db.models.manager import EventManager from sentry.interfaces.base import get_interfaces from sentry.utils", "def save(self): raise NotImplementedError class Event(EventCommon, Model): \"\"\" An event", "state.pop(\"interfaces\", None) return state class EventSubjectTemplate(string.Template): idpattern = r\"(tag:)?[_a-z][_a-z0-9]*\" class", "to need the entire event body anyway (which requires a", "SnubaEvent.minimal_columns) # self.snuba_data is a dict of all the stuff", "# Required to provide snuba-only tags \"tags.key\", \"tags.value\", # Required", "= BoundedIntegerField(null=True) data = NodeField( blank=True, null=True, ref_func=lambda x: x.project_id", "This is used as a wrapper type for `Event.data` such", "class Meta: app_label = \"sentry\" db_table = \"sentry_message\" verbose_name =", "in snuba_values for k in SnubaEvent.minimal_columns) # self.snuba_data is a", "trick as there is a reference bug with unsaved #", "from sentry.db.models import ( BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr,", "the data property in the same way as a regular", "json from sentry.utils.cache import memoize from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView", "sentry.models import Project if not hasattr(self, \"_project_cache\"): self._project_cache = Project.objects.get(id=self.project_id)", "# ==================================================== # Snuba implementations of the django fields on", "return self.get_interfaces() def get_interface(self, name): return self.interfaces.get(name) def get_legacy_message(self): #", "== len(values): return sorted(zip(keys, values)) else: return [] else: return", "User.to_python( { \"id\": self.user_id, \"email\": self.email, \"username\": self.username, \"ip_address\": self.ip_address,", "The 
minimal list of columns we need to get from", "bootstrap an # event. If the client is planning on", "data available to return, or we may have to look", "ip_address remote_addr = get_path(self.data, \"request\", \"env\", \"REMOTE_ADDR\") if remote_addr: return", "it's merged with the stored or # default config dictionary", "snuba et = eventtypes.get(self.get_event_type())() return et.get_location(self.get_event_metadata()) @property def real_message(self): #", "models. But the current _group_cache thing is also clunky because", "want to hobble along # further. return self.data.get(\"metadata\") or {}", "in and we can just return # it, otherwise we", "need to access on it. If you only need a", "schema. \"\"\" def __init__(self, data, skip_renormalization=False, **kwargs): is_renormalized = isinstance(data,", "get_path(self.data, \"logentry\", \"message\") or \"\" ) @property def organization(self): return", "from snuba are UTC. return parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property def time_spent(self): return", "ref_func=lambda x: x.project_id or x.project.id, ref_version=2, wrapper=EventDict, ) objects =", "__core__ = False group_id = BoundedBigIntegerField(blank=True, null=True) event_id = models.CharField(max_length=32,", "= \"sentry_message\" verbose_name = _(\"message\") verbose_name_plural = _(\"messages\") unique_together =", "\"re-normalization\". 
This is used as a wrapper type for `Event.data`", "ordered for a potential JSON serializer data = OrderedDict() data[\"event_id\"]", "though the string # ends with '+00:00', so just replace", "datetime of this event from the snuba timestamp \"\"\" #", "ip_address: return ip_address remote_addr = get_path(self.data, \"request\", \"env\", \"REMOTE_ADDR\") if", "only use # this if we do not force a", "self.group.get_level_display() if self.group: return self.group.get_level_display() else: return None # deprecated", "return self.data.get(\"version\", \"5\") @property def ip_address(self): ip_address = get_path(self.data, \"user\",", "save(self): raise NotImplementedError class Event(EventCommon, Model): \"\"\" An event backed", "= force_config # Otherwise we just use the same grouping", "a potential JSON serializer data = OrderedDict() data[\"event_id\"] = self.event_id", "through basic (Rust-based) type/schema validation called \"re-normalization\". This is used", "self.group.culprit # Override title and location with dynamically generated data", "import pytz from collections import OrderedDict from dateutil.parser import parse", "so it can be retrieved using the same generated id", "get_event_metadata(self): \"\"\" Return the metadata of this event. See ``sentry.eventtypes``.", "deprecated. Use Event.tags instead.\", DeprecationWarning) return self.get_tag(\"site\") @property def server_name(self):", "the hex event_id here. We should be moving to a", "super(SnubaEvent, self).ip_address @property def title(self): if \"title\" in self.snuba_data: return", "name == \"project\": return self.event.project.get_full_name() elif name == \"projectID\": return", "not None ] ) return rv except ValueError: # at", "which inserts this for snuba et = eventtypes.get(self.get_event_type())() return et.get_title(self.get_event_metadata())", "want to fetch this on demand # again. In particular", "get_path(self.data, \"tags\", filter=True) or () if t is not None", "should be # removed. 
`message` will be renamed to `search_message`", "by data stored in snuba. This is a readonly event", "from sentry.utils.strings import truncatechars class EventDict(CanonicalKeyDict): \"\"\" Creating an instance", "ones from the data. We can only use # this", "of this dictionary will send the event through basic (Rust-based)", "would # pickle a CanonicalKeyView which old sentry workers do", "normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False) data = normalizer.normalize_event(dict(data)) CanonicalKeyDict.__init__(self, data, **kwargs)", "a long time culprit was not persisted. In those cases", "\"platform\" in self.snuba_data: return self.snuba_data[\"platform\"] return self.data.get(\"platform\") @property def id(self):", "self.snuba_data[\"culprit\"] return super(SnubaEvent, self).culprit @property def location(self): if \"location\" in", "def get_event_type(self): if \"type\" in self.snuba_data: return self.snuba_data[\"type\"] return super(SnubaEvent,", "eventtypes.get(self.get_event_type())() return et.get_location(self.get_event_metadata()) @property def real_message(self): # XXX(mitsuhiko): this is", "JSON serializer data = OrderedDict() data[\"event_id\"] = self.event_id data[\"project\"] =", "data[\"title\"] = self.title data[\"location\"] = self.location return data # ============================================", "def time_spent(self): return None @property def message(self): if \"message\" in", "inexplicable reason we have some cases where the data #", "want to move to this: # return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level", "or get_path(self.data, \"logentry\", \"message\") or \"\" ) @property def organization(self):", "`search_message` and this # will become `message`. 
return ( get_path(self.data,", "# TODO (alex) We need a better way to cache", "return self.data.get(\"culprit\") or self.group.culprit return self.data.get(\"culprit\") @property def location(self): #", "\"ip_address\") if ip_address: return ip_address remote_addr = get_path(self.data, \"request\", \"env\",", "nodestore anyway, we may as well only fetch the minimum", "isinstance(data.data, EventDict) ) if not skip_renormalization and not is_renormalized: normalizer", "config = force_config # Otherwise we just use the same", "that gives us enough information to render a user badge.", "# we might want to move to this: # return", "through # plugins should instead swithc to the actual message", "keys and values and len(keys) == len(values): return sorted(zip(keys, values))", "self.get_tag(\"sentry:dist\") def get_raw_data(self): \"\"\"Returns the internal raw event data dict.\"\"\"", "\"platform\", \"title\", \"type\", # Required to provide snuba-only tags \"tags.key\",", "return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level if self.group: return self.group.level else: return", "return None if not hasattr(self, \"_group_cache\"): self._group_cache = Group.objects.get(id=self.group_id) return", "import StoreNormalizer from sentry import eventtypes from sentry.db.models import (", "def get_level_display(self): # we might want to move to this:", "CanonicalKeyDict, CanonicalKeyView from sentry.utils.safe import get_path from sentry.utils.strings import truncatechars", "this: # return self.get_tag('level') or self.group.get_level_display() if self.group: return self.group.get_level_display()", "elif name == \"projectID\": return self.event.project.slug elif name == \"shortID\"", "transitional attribute that should be # removed. `message` will be", "Required to provide snuba-only 'user' interface \"email\", \"ip_address\", \"user_id\", \"username\",", "nodestore and bound to the data property in the same", "plugins. 
def get_tags(self): return self.tags def get_tag(self, key): for t,", "are UTC. return parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property def time_spent(self): return None @property", "are going to need the entire event body anyway (which", "None, [x.get_hash() for x in self.get_grouping_variants(force_config).values()] ) def get_grouping_variants(self, force_config=None,", "way in and we can just return # it, otherwise", "Project.objects.get(id=self.project_id) return self._project_cache @project.setter def project(self, project): if project is", "the trick as there is a reference bug with unsaved", "better way to cache these properties. functools32 # doesn't quite", "use # this if we do not force a different", "for v_k, v_v in six.iteritems(v) if v_k != \"client_ip\"} data[k]", "# For some inexplicable reason we have some cases where", "in normalized form for external consumers.\"\"\" # We use a", "clunky because these # properties need to be stripped out", "For a while events did not save the culprit if", "# further. return self.data.get(\"metadata\") or {} def get_grouping_config(self): \"\"\"Returns the", "import eventtypes from sentry.db.models import ( BoundedBigIntegerField, BoundedIntegerField, Model, NodeData,", "key: return v return None @property def release(self): return self.get_tag(\"sentry:release\")", "`Event.data` such that creating an event object (or loading it", "SnubaEvent. \"\"\" @classmethod def generate_node_id(cls, project_id, event_id): \"\"\" Returns a", "self.data.get(\"platform\") @property def id(self): # Because a snuba event will", "type/schema validation called \"re-normalization\". 
This is used as a wrapper", "def __init__(self, event): self.event = event def __getitem__(self, name): if", "use Event.title\", DeprecationWarning) return self.title class SnubaEvent(EventCommon): \"\"\" An event", "self._environment_cache = Environment.objects.get( organization_id=self.project.organization_id, name=Environment.get_name_or_default(self.get_tag(\"environment\")), ) return self._environment_cache def get_minimal_user(self):", "truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode( \"utf-8\" ) def get_environment(self): from sentry.models import Environment", "return parse_date(self.timestamp).replace(tzinfo=pytz.utc) @property def time_spent(self): return None @property def message(self):", "normalize_stacktraces_for_grouping # Forcing configs has two separate modes. One is", "the message rename is through # plugins should instead swithc", "# it was already normalized on the way in and", "currently is real_message. return ( get_path(self.data, \"logentry\", \"formatted\") or get_path(self.data,", "snuba, we assume # it was already normalized on the", "\"\"\" @classmethod def generate_node_id(cls, project_id, event_id): \"\"\" Returns a deterministic", "collections import OrderedDict from dateutil.parser import parse as parse_date from", "snuba_values): \"\"\" When initializing a SnubaEvent, think about the attributes", "with dynamically generated data data[\"title\"] = self.title data[\"location\"] = self.location", "reference bug with unsaved # models. But the current _group_cache", "data: continue if k == \"sdk\": v = {v_k: v_v", "get_raw_data(self): \"\"\"Returns the internal raw event data dict.\"\"\" return dict(self.data.items())", "properties need to be stripped out in __getstate__. @property def", "avoid duplicated work. minimal_columns = [\"event_id\", \"group_id\", \"project_id\", \"timestamp\"] #", "force a nodestore load). 
All unresolved self.foo type accesses will", "return rv except ValueError: # at one point Sentry allowed", "def __init__(self, data, skip_renormalization=False, **kwargs): is_renormalized = isinstance(data, EventDict) or", "load_grouping_config(config) if normalize_stacktraces: normalize_stacktraces_for_grouping(self.data, config) return get_grouping_variants_for_event(self, config) def get_primary_hash(self):", "culprit was not persisted. In those cases put # the", "app_label = \"sentry\" db_table = \"sentry_message\" verbose_name = _(\"message\") verbose_name_plural", "return what currently is real_message. return ( get_path(self.data, \"logentry\", \"formatted\")", "django.utils.translation import ugettext_lazy as _ from hashlib import md5 from", "= group.id self._group_cache = group @property def project(self): from sentry.models", "@property def id(self): # Because a snuba event will never", "to # avoid duplicated work. minimal_columns = [\"event_id\", \"group_id\", \"project_id\",", "use `SnubaEvent.selected_colums` (or a subset depending on your needs) But", "events did not save the culprit if self.group_id: return self.data.get(\"culprit\")", "type schema. \"\"\" def __init__(self, data, skip_renormalization=False, **kwargs): is_renormalized =", "\"REMOTE_ADDR\") if remote_addr: return remote_addr return None @property def tags(self):", "event. If the client is planning on loading the entire", "def get_primary_hash(self): # TODO: This *might* need to be protected", "unique_together = ((\"project_id\", \"event_id\"),) index_together = ((\"group_id\", \"datetime\"),) __repr__ =", "pickle a CanonicalKeyView which old sentry workers do not know", "if keys and values and len(keys) == len(values): return sorted(zip(keys,", "sorted(six.iteritems(self.data)): if k in data: continue if k == \"sdk\":", "return dict(self.data.items()) @property def size(self): return len(json.dumps(dict(self.data))) @property def transaction(self):", "hex event_id here. 
We should be moving to a world", ") if not skip_renormalization and not is_renormalized: normalizer = StoreNormalizer(is_renormalize=True,", "move to this: # return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level if self.group:", "False group_id = BoundedBigIntegerField(blank=True, null=True) event_id = models.CharField(max_length=32, null=True, db_column=\"message_id\")", "return six.text_type(value) elif name == \"project\": return self.event.project.get_full_name() elif name", "we want to hobble along # further. return self.data.get(\"metadata\") or", "# Otherwise we just use the same grouping config as", "len(json.dumps(dict(self.data))) @property def transaction(self): return self.get_tag(\"transaction\") def get_email_subject(self): template =" ]
[ "dataset',default='../../data/sevir') # parser.add_argument('--sevir_catalog', type=str, help='location of SEVIR dataset',default='../../data/CATALOG.csv') # parser.add_argument('--output_location',", "logger.info('Gathering chunk 0/%s:' % n_chunks) X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True) # Create datasets with", "import sys import numpy as np import tensorflow as tf", "training & test datasets using SEVIR') # parser.add_argument('--sevir_data', type=str, help='location", "as hf: hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3])) hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3])) # Gather", "tf from nowcast_generator import get_nowcast_test_generator # parser = argparse.ArgumentParser(description='Make nowcast", "hf: hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3])) hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3])) # Gather other", "parser.parse_args() def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10): \"\"\" Runs data processing scripts to extract", "testing data to ' + output_location+'/nowcast_testing.h5') read_write_chunks(output_location+'/nowcast_testing.h5',tst_generator,n_chunks) def read_write_chunks( filename,", "= get_nowcast_test_generator(sevir_catalog,sevir_location) #ogger.info('Reading/writing training data to %s' % ('%s/nowcast_training.h5' %", "#ogger.info('Reading/writing training data to %s' % ('%s/nowcast_training.h5' % args.output_location)) #read_write_chunks('%s/nowcast_training.h5'", "hf['OUT'].resize((hf['OUT'].shape[0] + Y[0].shape[0]), axis = 0) hf['IN'][-X[0].shape[0]:] = X[0] hf['OUT'][-Y[0].shape[0]:]", "chunk 0/%s:' % n_chunks) X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True) # Create datasets with h5py.File(filename,", "dataset',default='../../data/interim') # 
parser.add_argument('--n_chunks', type=int, help='Number of chucks to use (increase", "SEVIR dataset',default='../../data/CATALOG.csv') # parser.add_argument('--output_location', type=str, help='location of SEVIR dataset',default='../../data/interim') #", "os.environ[\"HDF5_USE_FILE_LOCKING\"]='FALSE' import sys import numpy as np import tensorflow as", "SEVIR \"\"\" logger = logging.getLogger(__name__) logger.info('making final data set from", "'w') as hf: hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3])) hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3])) #", "datasets using SEVIR') # parser.add_argument('--sevir_data', type=str, help='location of SEVIR dataset',default='../../data/sevir')", "X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True) # Create datasets with h5py.File(filename, 'w') as hf: hf.create_dataset('IN',", "data to ' + output_location+'/nowcast_testing.h5') read_write_chunks(output_location+'/nowcast_testing.h5',tst_generator,n_chunks) def read_write_chunks( filename, generator,", "for c in range(1,n_chunks+1): offset = c*chunksize n_batches = min(chunksize,len(generator)-offset)", "SEVIR \"\"\" # -*- coding: utf-8 -*- import argparse import", "# -*- coding: utf-8 -*- import argparse import logging import", "chucks to use (increase if memory limited)',default=10) #args = parser.parse_args()", "\"\"\" Makes training and test dataset for nowcasting model using", "training data to %s' % ('%s/nowcast_training.h5' % args.output_location)) #read_write_chunks('%s/nowcast_training.h5' %", "logging import os import h5py os.environ[\"HDF5_USE_FILE_LOCKING\"]='FALSE' import sys import numpy", "SEVIR dataset',default='../../data/interim') # parser.add_argument('--n_chunks', type=int, help='Number of chucks to use", "use (increase if memory limited)',default=10) #args = parser.parse_args() def 
generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10):", "Create datasets with h5py.File(filename, 'w') as hf: hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3]))", "h5py.File(filename, 'w') as hf: hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3])) hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3]))", "data') #trn_generator = get_nowcast_train_generator(sevir_catalog=args.sevir_catalog,sevir_location=args.sevir_data) tst_generator = get_nowcast_test_generator(sevir_catalog,sevir_location) #ogger.info('Reading/writing training data", "if memory limited)',default=10) #args = parser.parse_args() def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10): \"\"\" Runs", "= min(chunksize,len(generator)-offset) if n_batches<0: # all done break logger.info('Gathering chunk", "n_chunks ): logger = logging.getLogger(__name__) chunksize = len(generator)//n_chunks # get", "utf-8 -*- import argparse import logging import os import h5py", "hf['IN'].resize((hf['IN'].shape[0] + X[0].shape[0]), axis = 0) hf['OUT'].resize((hf['OUT'].shape[0] + Y[0].shape[0]), axis", "of SEVIR dataset',default='../../data/CATALOG.csv') # parser.add_argument('--output_location', type=str, help='location of SEVIR dataset',default='../../data/interim')", "maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3])) hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3])) # Gather other chunks for c", "-*- coding: utf-8 -*- import argparse import logging import os", "n_chunks) X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True) # Create datasets with h5py.File(filename, 'w') as hf:", "offset = c*chunksize n_batches = min(chunksize,len(generator)-offset) if n_batches<0: # all", "type=str, help='location of SEVIR dataset',default='../../data/sevir') # 
parser.add_argument('--sevir_catalog', type=str, help='location of", "hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3])) hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3])) # Gather other chunks", "argparse.ArgumentParser(description='Make nowcast training & test datasets using SEVIR') # parser.add_argument('--sevir_data',", "data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3])) hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3])) # Gather other chunks for", "args.output_location)) #read_write_chunks('%s/nowcast_training.h5' % args.output_location,trn_generator,args.n_chunks) logger.info('Reading/writing testing data to ' +", "len(generator)//n_chunks # get first chunk logger.info('Gathering chunk 0/%s:' % n_chunks)", "np import tensorflow as tf from nowcast_generator import get_nowcast_test_generator #", "other chunks for c in range(1,n_chunks+1): offset = c*chunksize n_batches", "#trn_generator = get_nowcast_train_generator(sevir_catalog=args.sevir_catalog,sevir_location=args.sevir_data) tst_generator = get_nowcast_test_generator(sevir_catalog,sevir_location) #ogger.info('Reading/writing training data to", "training set from SEVIR \"\"\" logger = logging.getLogger(__name__) logger.info('making final", "def read_write_chunks( filename, generator, n_chunks ): logger = logging.getLogger(__name__) chunksize", "#args = parser.parse_args() def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10): \"\"\" Runs data processing scripts", "set from SEVIR \"\"\" logger = logging.getLogger(__name__) logger.info('making final data", "#read_write_chunks('%s/nowcast_training.h5' % args.output_location,trn_generator,args.n_chunks) logger.info('Reading/writing testing data to ' + output_location+'/nowcast_testing.h5')", "training and test dataset for nowcasting model using SEVIR \"\"\"", "# 
parser.add_argument('--output_location', type=str, help='location of SEVIR dataset',default='../../data/interim') # parser.add_argument('--n_chunks', type=int,", "final data set from raw data') #trn_generator = get_nowcast_train_generator(sevir_catalog=args.sevir_catalog,sevir_location=args.sevir_data) tst_generator", "n_batches<0: # all done break logger.info('Gathering chunk %d/%s:' % (c,n_chunks))", "hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3])) # Gather other chunks for c in", "(c,n_chunks)) X,Y=generator.load_batches(n_batches=n_batches,offset=offset,progress_bar=True) with h5py.File(filename, 'a') as hf: hf['IN'].resize((hf['IN'].shape[0] + X[0].shape[0]),", "nowcast training & test datasets using SEVIR') # parser.add_argument('--sevir_data', type=str,", "from raw data') #trn_generator = get_nowcast_train_generator(sevir_catalog=args.sevir_catalog,sevir_location=args.sevir_data) tst_generator = get_nowcast_test_generator(sevir_catalog,sevir_location) #ogger.info('Reading/writing", "filename, generator, n_chunks ): logger = logging.getLogger(__name__) chunksize = len(generator)//n_chunks", "and test dataset for nowcasting model using SEVIR \"\"\" #", "using SEVIR \"\"\" # -*- coding: utf-8 -*- import argparse", "datasets with h5py.File(filename, 'w') as hf: hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3])) hf.create_dataset('OUT',", "import tensorflow as tf from nowcast_generator import get_nowcast_test_generator # parser", "(increase if memory limited)',default=10) #args = parser.parse_args() def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10): \"\"\"", "parser.add_argument('--sevir_data', type=str, help='location of SEVIR dataset',default='../../data/sevir') # parser.add_argument('--sevir_catalog', type=str, help='location", "of chucks to use (increase if memory limited)',default=10) #args =", "chunk %d/%s:' % (c,n_chunks)) 
X,Y=generator.load_batches(n_batches=n_batches,offset=offset,progress_bar=True) with h5py.File(filename, 'a') as hf:", "get_nowcast_train_generator(sevir_catalog=args.sevir_catalog,sevir_location=args.sevir_data) tst_generator = get_nowcast_test_generator(sevir_catalog,sevir_location) #ogger.info('Reading/writing training data to %s' %", "first chunk logger.info('Gathering chunk 0/%s:' % n_chunks) X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True) # Create", "# Gather other chunks for c in range(1,n_chunks+1): offset =", "logger = logging.getLogger(__name__) logger.info('making final data set from raw data')", "0/%s:' % n_chunks) X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True) # Create datasets with h5py.File(filename, 'w')", "type=int, help='Number of chucks to use (increase if memory limited)',default=10)", "logging.getLogger(__name__) logger.info('making final data set from raw data') #trn_generator =", "done break logger.info('Gathering chunk %d/%s:' % (c,n_chunks)) X,Y=generator.load_batches(n_batches=n_batches,offset=offset,progress_bar=True) with h5py.File(filename,", "0) hf['OUT'].resize((hf['OUT'].shape[0] + Y[0].shape[0]), axis = 0) hf['IN'][-X[0].shape[0]:] = X[0]", "logger.info('making final data set from raw data') #trn_generator = get_nowcast_train_generator(sevir_catalog=args.sevir_catalog,sevir_location=args.sevir_data)", "\"\"\" # -*- coding: utf-8 -*- import argparse import logging", "tst_generator = get_nowcast_test_generator(sevir_catalog,sevir_location) #ogger.info('Reading/writing training data to %s' % ('%s/nowcast_training.h5'", "extract training set from SEVIR \"\"\" logger = logging.getLogger(__name__) logger.info('making", "of SEVIR dataset',default='../../data/sevir') # parser.add_argument('--sevir_catalog', type=str, help='location of SEVIR dataset',default='../../data/CATALOG.csv')", "logger = logging.getLogger(__name__) chunksize = len(generator)//n_chunks # get first chunk", 
"generator, n_chunks ): logger = logging.getLogger(__name__) chunksize = len(generator)//n_chunks #", "+ X[0].shape[0]), axis = 0) hf['OUT'].resize((hf['OUT'].shape[0] + Y[0].shape[0]), axis =", "% (c,n_chunks)) X,Y=generator.load_batches(n_batches=n_batches,offset=offset,progress_bar=True) with h5py.File(filename, 'a') as hf: hf['IN'].resize((hf['IN'].shape[0] +", "output_location+'/nowcast_testing.h5') read_write_chunks(output_location+'/nowcast_testing.h5',tst_generator,n_chunks) def read_write_chunks( filename, generator, n_chunks ): logger =", "dataset',default='../../data/CATALOG.csv') # parser.add_argument('--output_location', type=str, help='location of SEVIR dataset',default='../../data/interim') # parser.add_argument('--n_chunks',", "test datasets using SEVIR') # parser.add_argument('--sevir_data', type=str, help='location of SEVIR", "h5py os.environ[\"HDF5_USE_FILE_LOCKING\"]='FALSE' import sys import numpy as np import tensorflow", "help='Number of chucks to use (increase if memory limited)',default=10) #args", "processing scripts to extract training set from SEVIR \"\"\" logger", "= logging.getLogger(__name__) logger.info('making final data set from raw data') #trn_generator", "coding: utf-8 -*- import argparse import logging import os import", "# parser = argparse.ArgumentParser(description='Make nowcast training & test datasets using", "get first chunk logger.info('Gathering chunk 0/%s:' % n_chunks) X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True) #", "parser.add_argument('--output_location', type=str, help='location of SEVIR dataset',default='../../data/interim') # parser.add_argument('--n_chunks', type=int, help='Number", "argparse import logging import os import h5py os.environ[\"HDF5_USE_FILE_LOCKING\"]='FALSE' import sys", "# parser.add_argument('--sevir_data', type=str, help='location of SEVIR dataset',default='../../data/sevir') # parser.add_argument('--sevir_catalog', type=str,", "help='location of SEVIR 
dataset',default='../../data/sevir') # parser.add_argument('--sevir_catalog', type=str, help='location of SEVIR", "Runs data processing scripts to extract training set from SEVIR", "limited)',default=10) #args = parser.parse_args() def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10): \"\"\" Runs data processing", "): logger = logging.getLogger(__name__) chunksize = len(generator)//n_chunks # get first", "& test datasets using SEVIR') # parser.add_argument('--sevir_data', type=str, help='location of", "\"\"\" logger = logging.getLogger(__name__) logger.info('making final data set from raw", "in range(1,n_chunks+1): offset = c*chunksize n_batches = min(chunksize,len(generator)-offset) if n_batches<0:", "read_write_chunks(output_location+'/nowcast_testing.h5',tst_generator,n_chunks) def read_write_chunks( filename, generator, n_chunks ): logger = logging.getLogger(__name__)", "\"\"\" Runs data processing scripts to extract training set from", "c*chunksize n_batches = min(chunksize,len(generator)-offset) if n_batches<0: # all done break", "<gh_stars>0 \"\"\" Makes training and test dataset for nowcasting model", "type=str, help='location of SEVIR dataset',default='../../data/CATALOG.csv') # parser.add_argument('--output_location', type=str, help='location of", "from nowcast_generator import get_nowcast_test_generator # parser = argparse.ArgumentParser(description='Make nowcast training", "= 0) hf['OUT'].resize((hf['OUT'].shape[0] + Y[0].shape[0]), axis = 0) hf['IN'][-X[0].shape[0]:] =", "import argparse import logging import os import h5py os.environ[\"HDF5_USE_FILE_LOCKING\"]='FALSE' import", "from SEVIR \"\"\" logger = logging.getLogger(__name__) logger.info('making final data set", "data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3])) # Gather other chunks for c in range(1,n_chunks+1):", "range(1,n_chunks+1): offset = c*chunksize n_batches = min(chunksize,len(generator)-offset) if n_batches<0: #", "import logging import os 
import h5py os.environ[\"HDF5_USE_FILE_LOCKING\"]='FALSE' import sys import", "get_nowcast_test_generator # parser = argparse.ArgumentParser(description='Make nowcast training & test datasets", "data to %s' % ('%s/nowcast_training.h5' % args.output_location)) #read_write_chunks('%s/nowcast_training.h5' % args.output_location,trn_generator,args.n_chunks)", "c in range(1,n_chunks+1): offset = c*chunksize n_batches = min(chunksize,len(generator)-offset) if", "with h5py.File(filename, 'a') as hf: hf['IN'].resize((hf['IN'].shape[0] + X[0].shape[0]), axis =", "numpy as np import tensorflow as tf from nowcast_generator import", "= len(generator)//n_chunks # get first chunk logger.info('Gathering chunk 0/%s:' %", "'a') as hf: hf['IN'].resize((hf['IN'].shape[0] + X[0].shape[0]), axis = 0) hf['OUT'].resize((hf['OUT'].shape[0]", "logger.info('Reading/writing testing data to ' + output_location+'/nowcast_testing.h5') read_write_chunks(output_location+'/nowcast_testing.h5',tst_generator,n_chunks) def read_write_chunks(", "Makes training and test dataset for nowcasting model using SEVIR", "% n_chunks) X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True) # Create datasets with h5py.File(filename, 'w') as", "# parser.add_argument('--n_chunks', type=int, help='Number of chucks to use (increase if", "sys import numpy as np import tensorflow as tf from", "parser.add_argument('--sevir_catalog', type=str, help='location of SEVIR dataset',default='../../data/CATALOG.csv') # parser.add_argument('--output_location', type=str, help='location", "scripts to extract training set from SEVIR \"\"\" logger =", "model using SEVIR \"\"\" # -*- coding: utf-8 -*- import", "os import h5py os.environ[\"HDF5_USE_FILE_LOCKING\"]='FALSE' import sys import numpy as np", "parser = argparse.ArgumentParser(description='Make nowcast training & test datasets using SEVIR')", "of SEVIR dataset',default='../../data/interim') # parser.add_argument('--n_chunks', type=int, help='Number of 
chucks to", "generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10): \"\"\" Runs data processing scripts to extract training set", "args.output_location,trn_generator,args.n_chunks) logger.info('Reading/writing testing data to ' + output_location+'/nowcast_testing.h5') read_write_chunks(output_location+'/nowcast_testing.h5',tst_generator,n_chunks) def", "= c*chunksize n_batches = min(chunksize,len(generator)-offset) if n_batches<0: # all done", "n_batches = min(chunksize,len(generator)-offset) if n_batches<0: # all done break logger.info('Gathering", "nowcast_generator import get_nowcast_test_generator # parser = argparse.ArgumentParser(description='Make nowcast training &", "nowcasting model using SEVIR \"\"\" # -*- coding: utf-8 -*-", "%s' % ('%s/nowcast_training.h5' % args.output_location)) #read_write_chunks('%s/nowcast_training.h5' % args.output_location,trn_generator,args.n_chunks) logger.info('Reading/writing testing", "as tf from nowcast_generator import get_nowcast_test_generator # parser = argparse.ArgumentParser(description='Make", "import get_nowcast_test_generator # parser = argparse.ArgumentParser(description='Make nowcast training & test", "def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10): \"\"\" Runs data processing scripts to extract training", "# Create datasets with h5py.File(filename, 'w') as hf: hf.create_dataset('IN', data=X[0],", "if n_batches<0: # all done break logger.info('Gathering chunk %d/%s:' %", "%d/%s:' % (c,n_chunks)) X,Y=generator.load_batches(n_batches=n_batches,offset=offset,progress_bar=True) with h5py.File(filename, 'a') as hf: hf['IN'].resize((hf['IN'].shape[0]", "= parser.parse_args() def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10): \"\"\" Runs data processing scripts to", "test dataset for nowcasting model using SEVIR \"\"\" # -*-", "% ('%s/nowcast_training.h5' % args.output_location)) #read_write_chunks('%s/nowcast_training.h5' % 
def read_write_chunks(filename, generator, n_chunks):
    """Stream batches from *generator* into an HDF5 file in ``n_chunks`` pieces.

    The first chunk creates resizable ``IN``/``OUT`` datasets sized by the
    first batch; every later chunk appends in place, so only one chunk is
    held in memory at a time.

    Parameters
    ----------
    filename : str
        Output HDF5 file path (overwritten if it exists).
    generator : object
        Batch loader exposing ``__len__`` and
        ``load_batches(n_batches, offset, progress_bar)``; returns X, Y
        tuples of arrays — assumed 4-D (batch, H, W, C); TODO confirm.
    n_chunks : int
        Number of pieces to split the generator into.
    """
    logger = logging.getLogger(__name__)
    chunksize = len(generator) // n_chunks
    # Get the first chunk and let it define the dataset shapes.
    logger.info('Gathering chunk 0/%s:' % n_chunks)
    X, Y = generator.load_batches(n_batches=chunksize, offset=0, progress_bar=True)
    with h5py.File(filename, 'w') as hf:
        hf.create_dataset('IN', data=X[0],
                          maxshape=(None, X[0].shape[1], X[0].shape[2], X[0].shape[3]))
        hf.create_dataset('OUT', data=Y[0],
                          maxshape=(None, Y[0].shape[1], Y[0].shape[2], Y[0].shape[3]))
    # Gather the remaining chunks, appending to the resizable datasets.
    for c in range(1, n_chunks + 1):
        offset = c * chunksize
        n_batches = min(chunksize, len(generator) - offset)
        # BUGFIX: was "n_batches < 0", which requested a zero-batch load when
        # len(generator) divides evenly by n_chunks.
        if n_batches <= 0:  # all done
            break
        logger.info('Gathering chunk %d/%s:' % (c, n_chunks))
        X, Y = generator.load_batches(n_batches=n_batches, offset=offset,
                                      progress_bar=True)
        with h5py.File(filename, 'a') as hf:
            hf['IN'].resize((hf['IN'].shape[0] + X[0].shape[0]), axis=0)
            hf['OUT'].resize((hf['OUT'].shape[0] + Y[0].shape[0]), axis=0)
            hf['IN'][-X[0].shape[0]:] = X[0]
            hf['OUT'][-Y[0].shape[0]:] = Y[0]
[ "weightBoneBuffer = mesh.VertexWeightBoneBuffer() weightValueBuffer = mesh.VertexWeightValueBuffer() for x, vert in", "vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence] ] = weightValueBuffer[x * maximumInfluence] blendMesh.to_mesh(newMesh) newMesh.create_normals_split()", "= handles[bone.ParentIndex()] bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='POSE') for bone in skeletonObj.pose.bones:", "None armature = bpy.data.armatures.new(\"Joints\") armature.display_type = \"STICK\" skeletonObj = bpy.data.objects.new(name,", "1])) uv.y = 1.0 - uv.y loop[vertexUVLayers[uvLayer]].uv = uv if", "blendMesh.to_mesh(newMesh) newMesh.create_normals_split() if len(vertexNormalLayer) > 0: for x, _loop in", "bpy import bmesh import os import array import math from", "[None] * len(bones) matrices = {} for i, bone in", "Vector((vertexPositions[x], vertexPositions[x + 1], vertexPositions[x + 2]))) blendMesh.verts.ensure_lookup_table() faceLookupMap =", "'S/G-Blender' bpy.ops.wm.append( filepath=os.path.join(file_path, inner_path, object_name), directory=os.path.join(file_path, inner_path), filename=object_name ) except:", "connection.Path())) #画像を読み込み except RuntimeError: pass if texture.image: material.node_tree.links.new( shader.inputs[switcher[slot]], texture.outputs[\"Color\"])", "= model.Meshes() for mesh in meshes: newMesh = bpy.data.meshes.new(\"polySurfaceMesh\") blendMesh", "[mesh.VertexUVLayerBuffer( x) for x in range(mesh.UVLayerCount())] def vertexToFaceVertex(face): for x,", "newBone = armature.edit_bones.new(bone.Name()) newBone.tail = 0, 0.05, 0 # I", "= bpy.data.images.load( utilityBuildPath(path, connection.Path())) #画像を読み込み except RuntimeError: pass if texture.image:", "is not None: return material.Name(), materialNew materialNew = bpy.data.materials.new(name=material.Name()) materialNew.use_nodes", "[blendMesh.verts[faces[faceStart + faceLookupMap[0]]], blendMesh.verts[faces[faceStart + faceLookupMap[1]]], 
def utilityAssignBSDFMaterialSlots(material, slots, path):
    """Wire a cast material's file slots into the appended S/G-Blender group.

    Removes the default Principled BSDF, instances the shared node group,
    then creates one image-texture node per recognised slot and links its
    colour output into the matching group input.
    """
    nodes = material.node_tree.nodes
    links = material.node_tree.links
    nodes.remove(nodes["Principled BSDF"])
    shader = nodes.new("ShaderNodeGroup")
    output = nodes['Material Output']
    # Use the shared shader group appended during load().
    shader.node_tree = bpy.data.node_groups['S/G-Blender']
    # Map cast slot names onto the group's input socket names.
    switcher = {
        "albedo": "Diffuse map",
        "diffuse": "Diffuse map",
        "specular": "Specular map",
        "ao": "AO map",
        "cavity": "Cavity map",
        "gloss": "Glossiness map",
        "normal": "Normal map",
        "emissive": "Emission input"
    }
    for slot in slots:
        connection = slots[slot]
        if not connection.__class__ is File:
            continue
        if not slot in switcher:
            continue
        # Create an image node and best-effort load the referenced file.
        texture = nodes.new("ShaderNodeTexImage")
        try:
            texture.image = bpy.data.images.load(
                utilityBuildPath(path, connection.Path()))
        except RuntimeError:
            pass  # missing/unreadable image: node is discarded below
        if texture.image:
            links.new(shader.inputs[switcher[slot]], texture.outputs["Color"])
            links.new(shader.outputs[0], output.inputs[0])
        else:
            nodes.remove(texture)
def importMaterialNode(path, material):
    """Create (or reuse) a Blender material for a cast material node.

    Returns ``(name, material)`` so callers can build a lookup table.
    """
    existing = bpy.data.materials.get(material.Name())
    if existing is not None:
        # Already imported on a previous call — reuse it.
        return material.Name(), existing
    created = bpy.data.materials.new(name=material.Name())
    created.use_nodes = True
    # Blender really only wants a BSDF shader node,
    # so we're gonna give it one.
    utilityAssignBSDFMaterialSlots(created, material.Slots(), path)
    return material.Name(), created
def importModelNode(model, path):
    """Build Blender meshes (with skeleton binds and materials) from a cast model."""
    # The model takes its name from the file on disk.
    modelName = os.path.splitext(os.path.basename(path))[0]
    # Skeleton first (meshes parent to it), then a name -> material table.
    skeletonObj = importSkeletonNode(modelName, model.Skeleton())
    materialArray = dict(importMaterialNode(path, mat)
                         for mat in model.Materials())
    for mesh in model.Meshes():
        newMesh = bpy.data.meshes.new("polySurfaceMesh")
        bm = bmesh.new()
        colorLayer = bm.loops.layers.color.new("color1")
        weightLayer = bm.verts.layers.deform.new()
        uvLayerCount = mesh.UVLayerCount()
        uvLayers = [bm.loops.layers.uv.new("map%d" % i)
                    for i in range(uvLayerCount)]

        positions = mesh.VertexPositionBuffer()
        for base in range(0, len(positions), 3):
            bm.verts.new(Vector((positions[base],
                                 positions[base + 1],
                                 positions[base + 2])))
        bm.verts.ensure_lookup_table()

        # Cast faces arrive in the opposite winding; remap each triangle.
        windingOrder = [1, 2, 0]
        normalsOut = []
        normals = mesh.VertexNormalBuffer()
        colors = mesh.VertexColorBuffer()
        uvBuffers = [mesh.VertexUVLayerBuffer(i) for i in range(uvLayerCount)]
        faces = mesh.FaceBuffer()

        def applyFaceVertexData(face, faceStart):
            # Copy per-vertex normals, UVs and colours onto the face loops.
            for li, loop in enumerate(face.loops):
                vi = faces[faceStart + windingOrder[li]]
                if normals is not None:
                    normalsOut.append((normals[vi * 3],
                                       normals[vi * 3 + 1],
                                       normals[vi * 3 + 2]))
                for ui in range(uvLayerCount):
                    uv = Vector((uvBuffers[ui][vi * 2],
                                 uvBuffers[ui][vi * 2 + 1]))
                    uv.y = 1.0 - uv.y  # flip V: cast is top-down, Blender bottom-up
                    loop[uvLayers[ui]].uv = uv
                if colors is not None:
                    loop[colorLayer] = [
                        (colors[vi] >> shift & 0xff) / 255.0
                        for shift in (24, 16, 8, 0)]

        for faceStart in range(0, len(faces), 3):
            corners = [bm.verts[faces[faceStart + windingOrder[0]]],
                       bm.verts[faces[faceStart + windingOrder[1]]],
                       bm.verts[faces[faceStart + windingOrder[2]]]]
            try:
                newFace = bm.faces.new(corners)
            except ValueError:
                # Degenerate or duplicate face — skip it.
                continue
            else:
                applyFaceVertexData(newFace, faceStart)

        maximumInfluence = mesh.MaximumWeightInfluence()
        if maximumInfluence > 0:
            boneBuffer = mesh.VertexWeightBoneBuffer()
            valueBuffer = mesh.VertexWeightValueBuffer()
            # NOTE(review): only the first influence per vertex is applied
            # here — confirm whether multi-influence weights are intended.
            for vi, vert in enumerate(bm.verts):
                weight = valueBuffer[vi * maximumInfluence]
                if weight > 0.0:
                    vert[weightLayer][boneBuffer[vi * maximumInfluence]] = weight

        bm.to_mesh(newMesh)
        newMesh.create_normals_split()
        if len(normalsOut) > 0:
            for li, _loop in enumerate(newMesh.loops):
                newMesh.loops[li].normal = normalsOut[li]
        newMesh.validate(clean_customdata=False)
        # Re-read the loop normals and install them as custom split normals.
        clnors = array.array('f', [0.0] * (len(newMesh.loops) * 3))
        newMesh.loops.foreach_get("normal", clnors)
        newMesh.polygons.foreach_set(
            "use_smooth", [True] * len(newMesh.polygons))
        newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
        newMesh.use_auto_smooth = True

        meshObj = bpy.data.objects.new("CastMesh", newMesh)
        bpy.context.view_layer.active_layer_collection.collection.objects.link(
            meshObj)
        bpy.context.view_layer.objects.active = meshObj
        meshMaterial = mesh.Material()
        if meshMaterial is not None:
            meshObj.data.materials.append(materialArray[meshMaterial.Name()])
        # One vertex group per bone, then bind through an armature modifier.
        for bone in skeletonObj.pose.bones:
            meshObj.vertex_groups.new(name=bone.name)
        meshObj.parent = skeletonObj
        modifier = meshObj.modifiers.new('Armature Rig', 'ARMATURE')
        modifier.object = skeletonObj
        modifier.use_bone_envelopes = False
        modifier.use_vertex_groups = True
def importSkeletonNode(name, skeleton):
    """Create an armature object for a cast skeleton; returns None when absent."""
    if skeleton is None:
        return None
    armature = bpy.data.armatures.new("Joints")
    armature.display_type = "STICK"
    skeletonObj = bpy.data.objects.new(name, armature)
    skeletonObj.show_in_front = True
    bpy.context.view_layer.active_layer_collection.collection.objects.link(
        skeletonObj)
    bpy.context.view_layer.objects.active = skeletonObj
    bpy.ops.object.mode_set(mode='EDIT')
    bones = skeleton.Bones()
    handles = [None] * len(bones)
    matrices = {}
    for index, bone in enumerate(bones):
        editBone = armature.edit_bones.new(bone.Name())
        # Non-zero tail so Blender keeps the bone alive.
        editBone.tail = 0, 0.05, 0
        # Cast stores rotations XYZW; mathutils.Quaternion wants WXYZ.
        q = bone.LocalRotation()
        rotation = Quaternion(
            (q[3], q[0], q[1], q[2])).to_matrix().to_4x4()
        translation = Matrix.Translation(Vector(bone.LocalPosition()))
        matrices[bone.Name()] = translation @ rotation
        handles[index] = editBone
    # Second pass: parenting needs every edit bone to exist first.
    for index, bone in enumerate(bones):
        if bone.ParentIndex() > -1:
            handles[index].parent = handles[bone.ParentIndex()]
    bpy.context.view_layer.objects.active = skeletonObj
    bpy.ops.object.mode_set(mode='POSE')
    # Pose each bone with its local transform, then bake as the rest pose.
    for poseBone in skeletonObj.pose.bones:
        poseBone.matrix_basis.identity()
        poseBone.matrix = matrices[poseBone.name]
    bpy.ops.pose.armature_apply()
    return skeletonObj
bpy.context.preferences.addons[__package__].preferences.apex_sgshader_path try: file_path =", "for (key, value) in ( importMaterialNode(path, x) for x in", "-1: handles[i].parent = handles[bone.ParentIndex()] bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='POSE') for bone", "in ( importMaterialNode(path, x) for x in model.Materials())} meshes =", "If you already created the material, ignore this materialNew =", "in enumerate(blendMesh.verts): if (weightValueBuffer[x * maximumInfluence] > 0.0): vert[vertexWeightLayer][weightBoneBuffer[x *", "try: newLoop = blendMesh.faces.new(indices) except ValueError: continue else: vertexToFaceVertex(newLoop) maximumInfluence", "uv.y = 1.0 - uv.y loop[vertexUVLayers[uvLayer]].uv = uv if vertexColors", "= { \"albedo\": \"Diffuse map\", \"diffuse\": \"Diffuse map\", \"specular\": \"Specular", "= skeletonObj bpy.ops.object.mode_set(mode='POSE') for bone in skeletonObj.pose.bones: bone.matrix_basis.identity() bone.matrix =", "vertexNormalLayer = [] vertexNormals = mesh.VertexNormalBuffer() vertexColors = mesh.VertexColorBuffer() vertexUVs", "in model.Materials())} meshes = model.Meshes() for mesh in meshes: newMesh", "connect the slots for slot in slots: connection = slots[slot]", "sorry but blender sucks tempQuat = bone.LocalRotation() # Also sucks,", "model from the path modelName = os.path.splitext(os.path.basename(path))[0] # Import skeleton", "= bpy.data.materials.get(material.Name()) if materialNew is not None: return material.Name(), materialNew", "= True bpy.context.view_layer.active_layer_collection.collection.objects.link( skeletonObj) bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='EDIT') bones =", "range(mesh.UVLayerCount())] vertexPositions = mesh.VertexPositionBuffer() for x in range(0, len(vertexPositions), 3):", "created by Nick # Copyright (c) 2020 Nick import bpy", "object_name), directory=os.path.join(file_path, inner_path), filename=object_name ) 
except: self.report({'ERROR'}, 'Set the Shader", "= mesh.Material() if meshMaterial is not None: meshObj.data.materials.append(materialArray[meshMaterial.Name()]) for bone", "x) for x in model.Materials())} meshes = model.Meshes() for mesh", "= mesh.VertexWeightValueBuffer() for x, vert in enumerate(blendMesh.verts): if (weightValueBuffer[x *", "importMaterialNode(path, material): # If you already created the material, ignore", "0.0): vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence] ] = weightValueBuffer[x * maximumInfluence] blendMesh.to_mesh(newMesh)", "clnors = array.array('f', [0.0] * (len(newMesh.loops) * 3)) newMesh.loops.foreach_get(\"normal\", clnors)", "Cast() cast.load(path) for root in cast.Roots(): importRootNode(root, path) def load(self,", "# texture.outputのカラーとinputをつなげる(link) else: material.node_tree.nodes.remove(texture) def importSkeletonNode(name, skeleton): if skeleton is", "meshObj meshMaterial = mesh.Material() if meshMaterial is not None: meshObj.data.materials.append(materialArray[meshMaterial.Name()])", "maximumInfluence] ] = weightValueBuffer[x * maximumInfluence] blendMesh.to_mesh(newMesh) newMesh.create_normals_split() if len(vertexNormalLayer)", "for mesh in meshes: newMesh = bpy.data.meshes.new(\"polySurfaceMesh\") blendMesh = bmesh.new()", "1], vertexNormals[(vertexIndex * 3) + 2])) for uvLayer in range(mesh.UVLayerCount()):", "material.node_tree.nodes.remove( material.node_tree.nodes[\"Principled BSDF\"]) shader = material.node_tree.nodes.new(\"ShaderNodeGroup\") output = material.node_tree.nodes['Material Output']", "# Copyright (c) 2020 Nick import bpy import bmesh import", "'NodeTree' object_name = 'S/G-Blender' bpy.ops.wm.append( filepath=os.path.join(file_path, inner_path, object_name), directory=os.path.join(file_path, inner_path),", "AddonPreferences first.') return False # Parse and load cast nodes", "newBone.tail = 0, 0.05, 0 # I am sorry but", "meshMaterial = mesh.Material() if meshMaterial is not None: 
meshObj.data.materials.append(materialArray[meshMaterial.Name()]) for", "\"emissive\": \"Emission input\" } # Loop and connect the slots", "in (24, 16, 8, 0)] faces = mesh.FaceBuffer() for faceStart", "* len(bones) matrices = {} for i, bone in enumerate(bones):", "= [] vertexNormals = mesh.VertexNormalBuffer() vertexColors = mesh.VertexColorBuffer() vertexUVs =", "am sorry but blender sucks tempQuat = bone.LocalRotation() # Also", "bpy.context.view_layer.active_layer_collection.collection.objects.link( meshObj) bpy.context.view_layer.objects.active = meshObj meshMaterial = mesh.Material() if meshMaterial", "0xff) / 255.0 for i in (24, 16, 8, 0)]", "def importMaterialNode(path, material): # If you already created the material,", "for child in node.ChildrenOfType(Model): importModelNode(child, path) # for child in", "\"map%d\" % x) for x in range(mesh.UVLayerCount())] vertexPositions = mesh.VertexPositionBuffer()", "map\", \"normal\": \"Normal map\", \"emissive\": \"Emission input\" } # Loop", "Update the scene, reset view mode before returning. bpy.context.view_layer.update() bpy.ops.object.mode_set(mode=\"OBJECT\")", "= mesh.MaximumWeightInfluence() if maximumInfluence > 0: weightBoneBuffer = mesh.VertexWeightBoneBuffer() weightValueBuffer", "= [mesh.VertexUVLayerBuffer( x) for x in range(mesh.UVLayerCount())] def vertexToFaceVertex(face): for", "* maximumInfluence] ] = weightValueBuffer[x * maximumInfluence] blendMesh.to_mesh(newMesh) newMesh.create_normals_split() if", "race matRotation = Quaternion( (tempQuat[3], tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4() matTranslation =", "blendMesh = bmesh.new() vertexColorLayer = blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer = blendMesh.verts.layers.deform.new() vertexUVLayers", "x) for x in range(mesh.UVLayerCount())] vertexPositions = mesh.VertexPositionBuffer() for x", "= bone.LocalRotation() # Also sucks, WXYZ? 
=> XYZW master race", "3) + 2])) for uvLayer in range(mesh.UVLayerCount()): uv = Vector(", "/ 255.0 for i in (24, 16, 8, 0)] faces", "newMesh.loops.foreach_get(\"normal\", clnors) newMesh.polygons.foreach_set( \"use_smooth\", [True] * len(newMesh.polygons)) newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))", "for x in range(mesh.UVLayerCount())] vertexPositions = mesh.VertexPositionBuffer() for x in", "bpy.data.materials.get(material.Name()) if materialNew is not None: return material.Name(), materialNew materialNew", "vertexToFaceVertex(face): for x, loop in enumerate(face.loops): vertexIndex = faces[faceStart +", "maximumInfluence > 0: weightBoneBuffer = mesh.VertexWeightBoneBuffer() weightValueBuffer = mesh.VertexWeightValueBuffer() for", "asset): if os.path.isabs(asset): return asset root = os.path.dirname(root) return os.path.join(root,", "= 1.0 - uv.y loop[vertexUVLayers[uvLayer]].uv = uv if vertexColors is", "except RuntimeError: pass if texture.image: material.node_tree.links.new( shader.inputs[switcher[slot]], texture.outputs[\"Color\"]) material.node_tree.links.new(shader.outputs[0], output.inputs[0])", "in node.ChildrenOfType(Animation): # importAnimationNode(child, path) def importCast(path): cast = Cast()", "= skeletonObj bpy.ops.object.mode_set(mode='EDIT') bones = skeleton.Bones() handles = [None] *", "Cast, Model, Animation, Curve, NotificationTrack, Mesh, Skeleton, Bone, Material, File", "Shader path in AddonPreferences first.') return False # Parse and", "materialNew def importModelNode(model, path): # Extract the name of this", "\"STICK\" skeletonObj = bpy.data.objects.new(name, armature) skeletonObj.show_in_front = True bpy.context.view_layer.active_layer_collection.collection.objects.link( skeletonObj)", "texture.outputs[\"Color\"]) material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # texture.outputのカラーとinputをつなげる(link) else: material.node_tree.nodes.remove(texture) def importSkeletonNode(name, skeleton):", 
"enumerate(bones): newBone = armature.edit_bones.new(bone.Name()) newBone.tail = 0, 0.05, 0 #", "created the material, ignore this materialNew = bpy.data.materials.get(material.Name()) if materialNew", "pass if texture.image: material.node_tree.links.new( shader.inputs[switcher[slot]], texture.outputs[\"Color\"]) material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # texture.outputのカラーとinputをつなげる(link)", "path modelName = os.path.splitext(os.path.basename(path))[0] # Import skeleton for binds, materials", "* (len(newMesh.loops) * 3)) newMesh.loops.foreach_get(\"normal\", clnors) newMesh.polygons.foreach_set( \"use_smooth\", [True] *", "self.report({'ERROR'}, 'Set the Shader path in AddonPreferences first.') return False", "return material.Name(), materialNew materialNew = bpy.data.materials.new(name=material.Name()) materialNew.use_nodes = True #", "only wants a BSDF shader node # so we're gonna", "shader_path inner_path = 'NodeTree' object_name = 'S/G-Blender' bpy.ops.wm.append( filepath=os.path.join(file_path, inner_path,", "faces[faceStart + faceLookupMap[x]] if vertexNormals is not None: vertexNormalLayer.append((vertexNormals[vertexIndex *", "(tempQuat[3], tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4() matTranslation = Matrix.Translation(Vector(bone.LocalPosition())) matrices[bone.Name()] = matTranslation", "if not connection.__class__ is File: continue if not slot in", "Rig', 'ARMATURE') modifier.object = skeletonObj modifier.use_bone_envelopes = False modifier.use_vertex_groups =", "newMesh.loops[x].normal = vertexNormalLayer[x] newMesh.validate(clean_customdata=False) clnors = array.array('f', [0.0] * (len(newMesh.loops)", "cast = Cast() cast.load(path) for root in cast.Roots(): importRootNode(root, path)", "path): material.node_tree.nodes.remove( material.node_tree.nodes[\"Principled BSDF\"]) shader = material.node_tree.nodes.new(\"ShaderNodeGroup\") output = material.node_tree.nodes['Material", "mesh in meshes: newMesh = 
bpy.data.meshes.new(\"polySurfaceMesh\") blendMesh = bmesh.new() vertexColorLayer", "BSDF shader node # so we're gonna give it one", "tempQuat[2])).to_matrix().to_4x4() matTranslation = Matrix.Translation(Vector(bone.LocalPosition())) matrices[bone.Name()] = matTranslation @ matRotation handles[i]", "was created by Nick # Copyright (c) 2020 Nick import", "False # Parse and load cast nodes importCast(filepath) # Update", "vert in enumerate(blendMesh.verts): if (weightValueBuffer[x * maximumInfluence] > 0.0): vert[vertexWeightLayer][weightBoneBuffer[x", "utilityAssignBSDFMaterialSlots(material, slots, path): material.node_tree.nodes.remove( material.node_tree.nodes[\"Principled BSDF\"]) shader = material.node_tree.nodes.new(\"ShaderNodeGroup\") output", "in range(mesh.UVLayerCount()): uv = Vector( (vertexUVs[uvLayer][vertexIndex * 2], vertexUVs[uvLayer][(vertexIndex *", "uv.y loop[vertexUVLayers[uvLayer]].uv = uv if vertexColors is not None: loop[vertexColorLayer]", "not None: return material.Name(), materialNew materialNew = bpy.data.materials.new(name=material.Name()) materialNew.use_nodes =", "* 3) + 2])) for uvLayer in range(mesh.UVLayerCount()): uv =", "WXYZ? 
=> XYZW master race matRotation = Quaternion( (tempQuat[3], tempQuat[0],", "\"Specular map\", \"ao\": \"AO map\", \"cavity\": \"Cavity map\", \"gloss\": \"Glossiness", "} # Loop and connect the slots for slot in", "map\", \"ao\": \"AO map\", \"cavity\": \"Cavity map\", \"gloss\": \"Glossiness map\",", "True # Blender really only wants a BSDF shader node", "utilityBuildPath(root, asset): if os.path.isabs(asset): return asset root = os.path.dirname(root) return", "in enumerate(newMesh.loops): newMesh.loops[x].normal = vertexNormalLayer[x] newMesh.validate(clean_customdata=False) clnors = array.array('f', [0.0]", "Nick import bpy import bmesh import os import array import", "weightValueBuffer = mesh.VertexWeightValueBuffer() for x, vert in enumerate(blendMesh.verts): if (weightValueBuffer[x", "in node.ChildrenOfType(Model): importModelNode(child, path) # for child in node.ChildrenOfType(Animation): #", "0, 0.05, 0 # I am sorry but blender sucks", "importRootNode(node, path): for child in node.ChildrenOfType(Model): importModelNode(child, path) # for", "newMesh) bpy.context.view_layer.active_layer_collection.collection.objects.link( meshObj) bpy.context.view_layer.objects.active = meshObj meshMaterial = mesh.Material() if", "+ faceLookupMap[1]]], blendMesh.verts[faces[faceStart + faceLookupMap[2]]]] try: newLoop = blendMesh.faces.new(indices) except", "2020 Nick import bpy import bmesh import os import array", "really only wants a BSDF shader node # so we're", "# シェーダーをアペンド shader_path = bpy.context.preferences.addons[__package__].preferences.apex_sgshader_path try: file_path = shader_path inner_path", "グループシェーダーを作成 shader.node_tree = bpy.data.node_groups['S/G-Blender'] #テクスチャを定義 switcher = { \"albedo\": \"Diffuse", "* maximumInfluence] blendMesh.to_mesh(newMesh) newMesh.create_normals_split() if len(vertexNormalLayer) > 0: for x,", "'ARMATURE') modifier.object = skeletonObj modifier.use_bone_envelopes = False modifier.use_vertex_groups = True", "# Extract the name of 
this model from the path", "is File: continue if not slot in switcher: continue texture", "value) in ( importMaterialNode(path, x) for x in model.Materials())} meshes", "bpy.ops.pose.armature_apply() return skeletonObj def importMaterialNode(path, material): # If you already", "= bpy.data.materials.new(name=material.Name()) materialNew.use_nodes = True # Blender really only wants", "if materialNew is not None: return material.Name(), materialNew materialNew =", "root in cast.Roots(): importRootNode(root, path) def load(self, context, filepath=\"\"): #", ">> i & 0xff) / 255.0 for i in (24,", "> 0.0): vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence] ] = weightValueBuffer[x * maximumInfluence]", "node.ChildrenOfType(Model): importModelNode(child, path) # for child in node.ChildrenOfType(Animation): # importAnimationNode(child,", "utilityBuildPath(path, connection.Path())) #画像を読み込み except RuntimeError: pass if texture.image: material.node_tree.links.new( shader.inputs[switcher[slot]],", "= newBone for i, bone in enumerate(bones): if bone.ParentIndex() >", "is None: return None armature = bpy.data.armatures.new(\"Joints\") armature.display_type = \"STICK\"", "bpy_extras.image_utils import load_image from .cast import Cast, Model, Animation, Curve,", "\"Glossiness map\", \"normal\": \"Normal map\", \"emissive\": \"Emission input\" } #", "\"Normal map\", \"emissive\": \"Emission input\" } # Loop and connect", "bone.matrix_basis.identity() bone.matrix = matrices[bone.name] bpy.ops.pose.armature_apply() return skeletonObj def importMaterialNode(path, material):", "> -1: handles[i].parent = handles[bone.ParentIndex()] bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='POSE') for", "skeletonObj bpy.ops.object.mode_set(mode='EDIT') bones = skeleton.Bones() handles = [None] * len(bones)", "newMesh.polygons.foreach_set( \"use_smooth\", [True] * len(newMesh.polygons)) newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3))) 
newMesh.use_auto_smooth =", "importAnimationNode(child, path) def importCast(path): cast = Cast() cast.load(path) for root", "scene, reset view mode before returning. bpy.context.view_layer.update() bpy.ops.object.mode_set(mode=\"OBJECT\") return True", "(key, value) in ( importMaterialNode(path, x) for x in model.Materials())}", "os.path.join(root, asset) def utilityAssignBSDFMaterialSlots(material, slots, path): material.node_tree.nodes.remove( material.node_tree.nodes[\"Principled BSDF\"]) shader", "vertexUVs[uvLayer][(vertexIndex * 2) + 1])) uv.y = 1.0 - uv.y", "bones = skeleton.Bones() handles = [None] * len(bones) matrices =", "* maximumInfluence] > 0.0): vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence] ] = weightValueBuffer[x", "path) # for child in node.ChildrenOfType(Animation): # importAnimationNode(child, path) def", "for uvLayer in range(mesh.UVLayerCount()): uv = Vector( (vertexUVs[uvLayer][vertexIndex * 2],", "x, loop in enumerate(face.loops): vertexIndex = faces[faceStart + faceLookupMap[x]] if", "x) for x in range(mesh.UVLayerCount())] def vertexToFaceVertex(face): for x, loop", "material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # texture.outputのカラーとinputをつなげる(link) else: material.node_tree.nodes.remove(texture) def importSkeletonNode(name, skeleton): if", "modelName = os.path.splitext(os.path.basename(path))[0] # Import skeleton for binds, materials for", "math from mathutils import * from bpy_extras.image_utils import load_image from", "[ (vertexColors[vertexIndex] >> i & 0xff) / 255.0 for i", "materialNew is not None: return material.Name(), materialNew materialNew = bpy.data.materials.new(name=material.Name())", "2])) for uvLayer in range(mesh.UVLayerCount()): uv = Vector( (vertexUVs[uvLayer][vertexIndex *", "weightValueBuffer[x * maximumInfluence] blendMesh.to_mesh(newMesh) newMesh.create_normals_split() if len(vertexNormalLayer) > 0: for", "skeletonObj def importMaterialNode(path, material): # If you already 
created the", "\"albedo\": \"Diffuse map\", \"diffuse\": \"Diffuse map\", \"specular\": \"Specular map\", \"ao\":", "# Also sucks, WXYZ? => XYZW master race matRotation =", "the slots for slot in slots: connection = slots[slot] if", "in switcher: continue texture = material.node_tree.nodes.new(\"ShaderNodeTexImage\") #画像ノードを作成 try: texture.image =", "> 0: for x, _loop in enumerate(newMesh.loops): newMesh.loops[x].normal = vertexNormalLayer[x]", "=> XYZW master race matRotation = Quaternion( (tempQuat[3], tempQuat[0], tempQuat[1],", "path) return material.Name(), materialNew def importModelNode(model, path): # Extract the", "first.') return False # Parse and load cast nodes importCast(filepath)", "matRotation = Quaternion( (tempQuat[3], tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4() matTranslation = Matrix.Translation(Vector(bone.LocalPosition()))", "+ 1], vertexNormals[(vertexIndex * 3) + 2])) for uvLayer in", "for root in cast.Roots(): importRootNode(root, path) def load(self, context, filepath=\"\"):", "clnors) newMesh.polygons.foreach_set( \"use_smooth\", [True] * len(newMesh.polygons)) newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3))) newMesh.use_auto_smooth", "Blender really only wants a BSDF shader node # so", "sucks tempQuat = bone.LocalRotation() # Also sucks, WXYZ? 
=> XYZW", "+ faceLookupMap[0]]], blendMesh.verts[faces[faceStart + faceLookupMap[1]]], blendMesh.verts[faces[faceStart + faceLookupMap[2]]]] try: newLoop", "import os import array import math from mathutils import *", "= weightValueBuffer[x * maximumInfluence] blendMesh.to_mesh(newMesh) newMesh.create_normals_split() if len(vertexNormalLayer) > 0:", "modifier = meshObj.modifiers.new('Armature Rig', 'ARMATURE') modifier.object = skeletonObj modifier.use_bone_envelopes =", "= True def importRootNode(node, path): for child in node.ChildrenOfType(Model): importModelNode(child,", "= matTranslation @ matRotation handles[i] = newBone for i, bone", "mesh.MaximumWeightInfluence() if maximumInfluence > 0: weightBoneBuffer = mesh.VertexWeightBoneBuffer() weightValueBuffer =", "skeletonObj = bpy.data.objects.new(name, armature) skeletonObj.show_in_front = True bpy.context.view_layer.active_layer_collection.collection.objects.link( skeletonObj) bpy.context.view_layer.objects.active", "child in node.ChildrenOfType(Animation): # importAnimationNode(child, path) def importCast(path): cast =", "uv = Vector( (vertexUVs[uvLayer][vertexIndex * 2], vertexUVs[uvLayer][(vertexIndex * 2) +", "def importCast(path): cast = Cast() cast.load(path) for root in cast.Roots():", "for i, bone in enumerate(bones): newBone = armature.edit_bones.new(bone.Name()) newBone.tail =", "NotificationTrack, Mesh, Skeleton, Bone, Material, File def utilityBuildPath(root, asset): if", "\"Emission input\" } # Loop and connect the slots for", "= [blendMesh.verts[faces[faceStart + faceLookupMap[0]]], blendMesh.verts[faces[faceStart + faceLookupMap[1]]], blendMesh.verts[faces[faceStart + faceLookupMap[2]]]]", "load cast nodes importCast(filepath) # Update the scene, reset view", "3)) newMesh.loops.foreach_get(\"normal\", clnors) newMesh.polygons.foreach_set( \"use_smooth\", [True] * len(newMesh.polygons)) newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) *", "2) + 1])) uv.y = 1.0 - uv.y 
loop[vertexUVLayers[uvLayer]].uv =", "newMesh.validate(clean_customdata=False) clnors = array.array('f', [0.0] * (len(newMesh.loops) * 3)) newMesh.loops.foreach_get(\"normal\",", "vertexNormals = mesh.VertexNormalBuffer() vertexColors = mesh.VertexColorBuffer() vertexUVs = [mesh.VertexUVLayerBuffer( x)", "= importSkeletonNode(modelName, model.Skeleton()) materialArray = {key: value for (key, value)", "= bpy.data.meshes.new(\"polySurfaceMesh\") blendMesh = bmesh.new() vertexColorLayer = blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer =", "= slots[slot] if not connection.__class__ is File: continue if not", "importSkeletonNode(name, skeleton): if skeleton is None: return None armature =", "mesh.VertexWeightBoneBuffer() weightValueBuffer = mesh.VertexWeightValueBuffer() for x, vert in enumerate(blendMesh.verts): if", "for meshes skeletonObj = importSkeletonNode(modelName, model.Skeleton()) materialArray = {key: value", "= mesh.FaceBuffer() for faceStart in range(0, len(faces), 3): indices =", "mesh.VertexWeightValueBuffer() for x, vert in enumerate(blendMesh.verts): if (weightValueBuffer[x * maximumInfluence]", "# グループシェーダーを作成 shader.node_tree = bpy.data.node_groups['S/G-Blender'] #テクスチャを定義 switcher = { \"albedo\":", "None: vertexNormalLayer.append((vertexNormals[vertexIndex * 3], vertexNormals[( vertexIndex * 3) + 1],", "cast nodes importCast(filepath) # Update the scene, reset view mode", "bpy.data.images.load( utilityBuildPath(path, connection.Path())) #画像を読み込み except RuntimeError: pass if texture.image: material.node_tree.links.new(", "matrices[bone.name] bpy.ops.pose.armature_apply() return skeletonObj def importMaterialNode(path, material): # If you", "loop in enumerate(face.loops): vertexIndex = faces[faceStart + faceLookupMap[x]] if vertexNormals", "and load cast nodes importCast(filepath) # Update the scene, reset", "for i in (24, 16, 8, 0)] faces = mesh.FaceBuffer()", "bpy.data.armatures.new(\"Joints\") armature.display_type = \"STICK\" 
skeletonObj = bpy.data.objects.new(name, armature) skeletonObj.show_in_front =", "#画像を読み込み except RuntimeError: pass if texture.image: material.node_tree.links.new( shader.inputs[switcher[slot]], texture.outputs[\"Color\"]) material.node_tree.links.new(shader.outputs[0],", "16, 8, 0)] faces = mesh.FaceBuffer() for faceStart in range(0,", "file_path = shader_path inner_path = 'NodeTree' object_name = 'S/G-Blender' bpy.ops.wm.append(", "path) def load(self, context, filepath=\"\"): # シェーダーをアペンド shader_path = bpy.context.preferences.addons[__package__].preferences.apex_sgshader_path", "None: return None armature = bpy.data.armatures.new(\"Joints\") armature.display_type = \"STICK\" skeletonObj", "range(0, len(faces), 3): indices = [blendMesh.verts[faces[faceStart + faceLookupMap[0]]], blendMesh.verts[faces[faceStart +", "model.Skeleton()) materialArray = {key: value for (key, value) in (", "continue if not slot in switcher: continue texture = material.node_tree.nodes.new(\"ShaderNodeTexImage\")", "faces = mesh.FaceBuffer() for faceStart in range(0, len(faces), 3): indices", "x, vert in enumerate(blendMesh.verts): if (weightValueBuffer[x * maximumInfluence] > 0.0):", "(c) 2020 Nick import bpy import bmesh import os import", "utilityAssignBSDFMaterialSlots(materialNew, material.Slots(), path) return material.Name(), materialNew def importModelNode(model, path): #", "bone.matrix = matrices[bone.name] bpy.ops.pose.armature_apply() return skeletonObj def importMaterialNode(path, material): #", "return asset root = os.path.dirname(root) return os.path.join(root, asset) def utilityAssignBSDFMaterialSlots(material,", "[blendMesh.loops.layers.uv.new( \"map%d\" % x) for x in range(mesh.UVLayerCount())] vertexPositions =", "this materialNew = bpy.data.materials.get(material.Name()) if materialNew is not None: return", "The Original importer was created by Nick # Copyright (c)", "File: continue if not slot in switcher: continue texture =", "skeleton.Bones() handles = [None] * 
len(bones) matrices = {} for", "len(vertexNormalLayer) > 0: for x, _loop in enumerate(newMesh.loops): newMesh.loops[x].normal =", "from mathutils import * from bpy_extras.image_utils import load_image from .cast", "in enumerate(face.loops): vertexIndex = faces[faceStart + faceLookupMap[x]] if vertexNormals is", "skeletonObj = importSkeletonNode(modelName, model.Skeleton()) materialArray = {key: value for (key,", "if texture.image: material.node_tree.links.new( shader.inputs[switcher[slot]], texture.outputs[\"Color\"]) material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # texture.outputのカラーとinputをつなげる(link) else:", "in cast.Roots(): importRootNode(root, path) def load(self, context, filepath=\"\"): # シェーダーをアペンド", "connection.__class__ is File: continue if not slot in switcher: continue", "= \"STICK\" skeletonObj = bpy.data.objects.new(name, armature) skeletonObj.show_in_front = True bpy.context.view_layer.active_layer_collection.collection.objects.link(", "Extract the name of this model from the path modelName", "Vector( (vertexUVs[uvLayer][vertexIndex * 2], vertexUVs[uvLayer][(vertexIndex * 2) + 1])) uv.y", "'Set the Shader path in AddonPreferences first.') return False #", "0)] faces = mesh.FaceBuffer() for faceStart in range(0, len(faces), 3):", "material.node_tree.nodes.remove(texture) def importSkeletonNode(name, skeleton): if skeleton is None: return None", "is not None: vertexNormalLayer.append((vertexNormals[vertexIndex * 3], vertexNormals[( vertexIndex * 3)", "bpy.context.preferences.addons[__package__].preferences.apex_sgshader_path try: file_path = shader_path inner_path = 'NodeTree' object_name =", "map\", \"diffuse\": \"Diffuse map\", \"specular\": \"Specular map\", \"ao\": \"AO map\",", "bmesh.new() vertexColorLayer = blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer = blendMesh.verts.layers.deform.new() vertexUVLayers = [blendMesh.loops.layers.uv.new(", "enumerate(bones): if bone.ParentIndex() > -1: handles[i].parent = 
handles[bone.ParentIndex()] bpy.context.view_layer.objects.active =", "mathutils import * from bpy_extras.image_utils import load_image from .cast import", "object_name = 'S/G-Blender' bpy.ops.wm.append( filepath=os.path.join(file_path, inner_path, object_name), directory=os.path.join(file_path, inner_path), filename=object_name", "os.path.isabs(asset): return asset root = os.path.dirname(root) return os.path.join(root, asset) def", "try: texture.image = bpy.data.images.load( utilityBuildPath(path, connection.Path())) #画像を読み込み except RuntimeError: pass", "{} for i, bone in enumerate(bones): newBone = armature.edit_bones.new(bone.Name()) newBone.tail", "not None: vertexNormalLayer.append((vertexNormals[vertexIndex * 3], vertexNormals[( vertexIndex * 3) +", "mesh.Material() if meshMaterial is not None: meshObj.data.materials.append(materialArray[meshMaterial.Name()]) for bone in", "cast.load(path) for root in cast.Roots(): importRootNode(root, path) def load(self, context,", "len(vertexPositions), 3): blendMesh.verts.new( Vector((vertexPositions[x], vertexPositions[x + 1], vertexPositions[x + 2])))", "output = material.node_tree.nodes['Material Output'] # グループシェーダーを作成 shader.node_tree = bpy.data.node_groups['S/G-Blender'] #テクスチャを定義", "skeletonObj) bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='EDIT') bones = skeleton.Bones() handles =", "blender sucks tempQuat = bone.LocalRotation() # Also sucks, WXYZ? 
=>", "2], vertexUVs[uvLayer][(vertexIndex * 2) + 1])) uv.y = 1.0 -", "loop[vertexUVLayers[uvLayer]].uv = uv if vertexColors is not None: loop[vertexColorLayer] =", "in enumerate(bones): newBone = armature.edit_bones.new(bone.Name()) newBone.tail = 0, 0.05, 0", "in range(mesh.UVLayerCount())] vertexPositions = mesh.VertexPositionBuffer() for x in range(0, len(vertexPositions),", "the material, ignore this materialNew = bpy.data.materials.get(material.Name()) if materialNew is", "importSkeletonNode(modelName, model.Skeleton()) materialArray = {key: value for (key, value) in", "meshObj.data.materials.append(materialArray[meshMaterial.Name()]) for bone in skeletonObj.pose.bones: meshObj.vertex_groups.new(name=bone.name) meshObj.parent = skeletonObj modifier", "directory=os.path.join(file_path, inner_path), filename=object_name ) except: self.report({'ERROR'}, 'Set the Shader path", "(vertexColors[vertexIndex] >> i & 0xff) / 255.0 for i in", "File def utilityBuildPath(root, asset): if os.path.isabs(asset): return asset root =", "materialArray = {key: value for (key, value) in ( importMaterialNode(path,", "maximumInfluence] blendMesh.to_mesh(newMesh) newMesh.create_normals_split() if len(vertexNormalLayer) > 0: for x, _loop", "node # so we're gonna give it one utilityAssignBSDFMaterialSlots(materialNew, material.Slots(),", "vertexColors is not None: loop[vertexColorLayer] = [ (vertexColors[vertexIndex] >> i", "connection = slots[slot] if not connection.__class__ is File: continue if", "mesh.VertexPositionBuffer() for x in range(0, len(vertexPositions), 3): blendMesh.verts.new( Vector((vertexPositions[x], vertexPositions[x", "* 2], vertexUVs[uvLayer][(vertexIndex * 2) + 1])) uv.y = 1.0", "material.node_tree.nodes[\"Principled BSDF\"]) shader = material.node_tree.nodes.new(\"ShaderNodeGroup\") output = material.node_tree.nodes['Material Output'] #", "handles = [None] * len(bones) matrices = {} for i,", "\"gloss\": \"Glossiness map\", \"normal\": \"Normal map\", 
\"emissive\": \"Emission input\" }", "#テクスチャを定義 switcher = { \"albedo\": \"Diffuse map\", \"diffuse\": \"Diffuse map\",", "slot in slots: connection = slots[slot] if not connection.__class__ is", "3): indices = [blendMesh.verts[faces[faceStart + faceLookupMap[0]]], blendMesh.verts[faces[faceStart + faceLookupMap[1]]], blendMesh.verts[faces[faceStart", "#画像ノードを作成 try: texture.image = bpy.data.images.load( utilityBuildPath(path, connection.Path())) #画像を読み込み except RuntimeError:", "range(mesh.UVLayerCount())] def vertexToFaceVertex(face): for x, loop in enumerate(face.loops): vertexIndex =", "= blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer = blendMesh.verts.layers.deform.new() vertexUVLayers = [blendMesh.loops.layers.uv.new( \"map%d\" %", "1], vertexPositions[x + 2]))) blendMesh.verts.ensure_lookup_table() faceLookupMap = [1, 2, 0]", "slots, path): material.node_tree.nodes.remove( material.node_tree.nodes[\"Principled BSDF\"]) shader = material.node_tree.nodes.new(\"ShaderNodeGroup\") output =", "& 0xff) / 255.0 for i in (24, 16, 8,", "import Cast, Model, Animation, Curve, NotificationTrack, Mesh, Skeleton, Bone, Material,", "\"diffuse\": \"Diffuse map\", \"specular\": \"Specular map\", \"ao\": \"AO map\", \"cavity\":", "= [1, 2, 0] vertexNormalLayer = [] vertexNormals = mesh.VertexNormalBuffer()", "[1, 2, 0] vertexNormalLayer = [] vertexNormals = mesh.VertexNormalBuffer() vertexColors", "= [blendMesh.loops.layers.uv.new( \"map%d\" % x) for x in range(mesh.UVLayerCount())] vertexPositions", "{key: value for (key, value) in ( importMaterialNode(path, x) for", "newMesh = bpy.data.meshes.new(\"polySurfaceMesh\") blendMesh = bmesh.new() vertexColorLayer = blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer", "os.path.dirname(root) return os.path.join(root, asset) def utilityAssignBSDFMaterialSlots(material, slots, path): material.node_tree.nodes.remove( material.node_tree.nodes[\"Principled", "+ 2]))) blendMesh.verts.ensure_lookup_table() 
faceLookupMap = [1, 2, 0] vertexNormalLayer =", "bpy.data.objects.new(\"CastMesh\", newMesh) bpy.context.view_layer.active_layer_collection.collection.objects.link( meshObj) bpy.context.view_layer.objects.active = meshObj meshMaterial = mesh.Material()", "bone in skeletonObj.pose.bones: meshObj.vertex_groups.new(name=bone.name) meshObj.parent = skeletonObj modifier = meshObj.modifiers.new('Armature", "Animation, Curve, NotificationTrack, Mesh, Skeleton, Bone, Material, File def utilityBuildPath(root,", "i & 0xff) / 255.0 for i in (24, 16,", "path): # Extract the name of this model from the", "this model from the path modelName = os.path.splitext(os.path.basename(path))[0] # Import", "switcher: continue texture = material.node_tree.nodes.new(\"ShaderNodeTexImage\") #画像ノードを作成 try: texture.image = bpy.data.images.load(", "armature.edit_bones.new(bone.Name()) newBone.tail = 0, 0.05, 0 # I am sorry", "= True meshObj = bpy.data.objects.new(\"CastMesh\", newMesh) bpy.context.view_layer.active_layer_collection.collection.objects.link( meshObj) bpy.context.view_layer.objects.active =", "0: weightBoneBuffer = mesh.VertexWeightBoneBuffer() weightValueBuffer = mesh.VertexWeightValueBuffer() for x, vert", "blendMesh.verts.layers.deform.new() vertexUVLayers = [blendMesh.loops.layers.uv.new( \"map%d\" % x) for x in", "handles[bone.ParentIndex()] bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='POSE') for bone in skeletonObj.pose.bones: bone.matrix_basis.identity()", "if os.path.isabs(asset): return asset root = os.path.dirname(root) return os.path.join(root, asset)", "faceLookupMap[2]]]] try: newLoop = blendMesh.faces.new(indices) except ValueError: continue else: vertexToFaceVertex(newLoop)", "= False modifier.use_vertex_groups = True def importRootNode(node, path): for child", "newBone for i, bone in enumerate(bones): if bone.ParentIndex() > -1:", "bpy.data.meshes.new(\"polySurfaceMesh\") blendMesh = bmesh.new() vertexColorLayer = 
blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer = blendMesh.verts.layers.deform.new()", "if skeleton is None: return None armature = bpy.data.armatures.new(\"Joints\") armature.display_type", "root = os.path.dirname(root) return os.path.join(root, asset) def utilityAssignBSDFMaterialSlots(material, slots, path):", "model.Meshes() for mesh in meshes: newMesh = bpy.data.meshes.new(\"polySurfaceMesh\") blendMesh =", "faceStart in range(0, len(faces), 3): indices = [blendMesh.verts[faces[faceStart + faceLookupMap[0]]],", "vertexToFaceVertex(newLoop) maximumInfluence = mesh.MaximumWeightInfluence() if maximumInfluence > 0: weightBoneBuffer =", "= uv if vertexColors is not None: loop[vertexColorLayer] = [", "matrices = {} for i, bone in enumerate(bones): newBone =", "i, bone in enumerate(bones): if bone.ParentIndex() > -1: handles[i].parent =", "None: meshObj.data.materials.append(materialArray[meshMaterial.Name()]) for bone in skeletonObj.pose.bones: meshObj.vertex_groups.new(name=bone.name) meshObj.parent = skeletonObj", "texture.image: material.node_tree.links.new( shader.inputs[switcher[slot]], texture.outputs[\"Color\"]) material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # texture.outputのカラーとinputをつなげる(link) else: material.node_tree.nodes.remove(texture)", "bmesh import os import array import math from mathutils import", "newLoop = blendMesh.faces.new(indices) except ValueError: continue else: vertexToFaceVertex(newLoop) maximumInfluence =", "[True] * len(newMesh.polygons)) newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3))) newMesh.use_auto_smooth = True meshObj", "bone.ParentIndex() > -1: handles[i].parent = handles[bone.ParentIndex()] bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='POSE')", "tempQuat[1], tempQuat[2])).to_matrix().to_4x4() matTranslation = Matrix.Translation(Vector(bone.LocalPosition())) matrices[bone.Name()] = matTranslation @ matRotation", "bone.LocalRotation() # Also 
sucks, WXYZ? => XYZW master race matRotation", "faceLookupMap = [1, 2, 0] vertexNormalLayer = [] vertexNormals =", "* 3)) newMesh.loops.foreach_get(\"normal\", clnors) newMesh.polygons.foreach_set( \"use_smooth\", [True] * len(newMesh.polygons)) newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),)", "= {key: value for (key, value) in ( importMaterialNode(path, x)", "= material.node_tree.nodes.new(\"ShaderNodeTexImage\") #画像ノードを作成 try: texture.image = bpy.data.images.load( utilityBuildPath(path, connection.Path())) #画像を読み込み", "is not None: loop[vertexColorLayer] = [ (vertexColors[vertexIndex] >> i &", "0: for x, _loop in enumerate(newMesh.loops): newMesh.loops[x].normal = vertexNormalLayer[x] newMesh.validate(clean_customdata=False)", "bpy.ops.wm.append( filepath=os.path.join(file_path, inner_path, object_name), directory=os.path.join(file_path, inner_path), filename=object_name ) except: self.report({'ERROR'},", "[] vertexNormals = mesh.VertexNormalBuffer() vertexColors = mesh.VertexColorBuffer() vertexUVs = [mesh.VertexUVLayerBuffer(", "# Import skeleton for binds, materials for meshes skeletonObj =", "os.path.splitext(os.path.basename(path))[0] # Import skeleton for binds, materials for meshes skeletonObj", "filepath=os.path.join(file_path, inner_path, object_name), directory=os.path.join(file_path, inner_path), filename=object_name ) except: self.report({'ERROR'}, 'Set", "blendMesh.verts.ensure_lookup_table() faceLookupMap = [1, 2, 0] vertexNormalLayer = [] vertexNormals", "newMesh.use_auto_smooth = True meshObj = bpy.data.objects.new(\"CastMesh\", newMesh) bpy.context.view_layer.active_layer_collection.collection.objects.link( meshObj) bpy.context.view_layer.objects.active", "bone in skeletonObj.pose.bones: bone.matrix_basis.identity() bone.matrix = matrices[bone.name] bpy.ops.pose.armature_apply() return skeletonObj", "# Parse and load cast nodes importCast(filepath) # Update the", "\"specular\": \"Specular map\", \"ao\": \"AO map\", \"cavity\": 
\"Cavity map\", \"gloss\":", "child in node.ChildrenOfType(Model): importModelNode(child, path) # for child in node.ChildrenOfType(Animation):", "meshes skeletonObj = importSkeletonNode(modelName, model.Skeleton()) materialArray = {key: value for", "range(mesh.UVLayerCount()): uv = Vector( (vertexUVs[uvLayer][vertexIndex * 2], vertexUVs[uvLayer][(vertexIndex * 2)", "try: file_path = shader_path inner_path = 'NodeTree' object_name = 'S/G-Blender'", "shader = material.node_tree.nodes.new(\"ShaderNodeGroup\") output = material.node_tree.nodes['Material Output'] # グループシェーダーを作成 shader.node_tree", "i, bone in enumerate(bones): newBone = armature.edit_bones.new(bone.Name()) newBone.tail = 0,", "path): for child in node.ChildrenOfType(Model): importModelNode(child, path) # for child", "map\", \"specular\": \"Specular map\", \"ao\": \"AO map\", \"cavity\": \"Cavity map\",", "* len(newMesh.polygons)) newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3))) newMesh.use_auto_smooth = True meshObj =", "matTranslation @ matRotation handles[i] = newBone for i, bone in", "vertexPositions[x + 1], vertexPositions[x + 2]))) blendMesh.verts.ensure_lookup_table() faceLookupMap = [1,", "blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer = blendMesh.verts.layers.deform.new() vertexUVLayers = [blendMesh.loops.layers.uv.new( \"map%d\" % x)", "vertexWeightLayer = blendMesh.verts.layers.deform.new() vertexUVLayers = [blendMesh.loops.layers.uv.new( \"map%d\" % x) for", "= os.path.dirname(root) return os.path.join(root, asset) def utilityAssignBSDFMaterialSlots(material, slots, path): material.node_tree.nodes.remove(", "uvLayer in range(mesh.UVLayerCount()): uv = Vector( (vertexUVs[uvLayer][vertexIndex * 2], vertexUVs[uvLayer][(vertexIndex", "enumerate(blendMesh.verts): if (weightValueBuffer[x * maximumInfluence] > 0.0): vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence]", "Model, Animation, Curve, NotificationTrack, Mesh, Skeleton, Bone, Material, File def", 
"but blender sucks tempQuat = bone.LocalRotation() # Also sucks, WXYZ?", "meshes = model.Meshes() for mesh in meshes: newMesh = bpy.data.meshes.new(\"polySurfaceMesh\")", "blendMesh.verts.new( Vector((vertexPositions[x], vertexPositions[x + 1], vertexPositions[x + 2]))) blendMesh.verts.ensure_lookup_table() faceLookupMap", "in AddonPreferences first.') return False # Parse and load cast", "output.inputs[0]) # texture.outputのカラーとinputをつなげる(link) else: material.node_tree.nodes.remove(texture) def importSkeletonNode(name, skeleton): if skeleton", "else: vertexToFaceVertex(newLoop) maximumInfluence = mesh.MaximumWeightInfluence() if maximumInfluence > 0: weightBoneBuffer", "# so we're gonna give it one utilityAssignBSDFMaterialSlots(materialNew, material.Slots(), path)", "def importModelNode(model, path): # Extract the name of this model", "gonna give it one utilityAssignBSDFMaterialSlots(materialNew, material.Slots(), path) return material.Name(), materialNew", "one utilityAssignBSDFMaterialSlots(materialNew, material.Slots(), path) return material.Name(), materialNew def importModelNode(model, path):", "\"ao\": \"AO map\", \"cavity\": \"Cavity map\", \"gloss\": \"Glossiness map\", \"normal\":", "array import math from mathutils import * from bpy_extras.image_utils import", "tempQuat = bone.LocalRotation() # Also sucks, WXYZ? 
=> XYZW master", "range(0, len(vertexPositions), 3): blendMesh.verts.new( Vector((vertexPositions[x], vertexPositions[x + 1], vertexPositions[x +", "blendMesh.faces.new(indices) except ValueError: continue else: vertexToFaceVertex(newLoop) maximumInfluence = mesh.MaximumWeightInfluence() if", "= skeletonObj modifier = meshObj.modifiers.new('Armature Rig', 'ARMATURE') modifier.object = skeletonObj", "bone in enumerate(bones): if bone.ParentIndex() > -1: handles[i].parent = handles[bone.ParentIndex()]", "material): # If you already created the material, ignore this", "inner_path), filename=object_name ) except: self.report({'ERROR'}, 'Set the Shader path in", "we're gonna give it one utilityAssignBSDFMaterialSlots(materialNew, material.Slots(), path) return material.Name(),", "except: self.report({'ERROR'}, 'Set the Shader path in AddonPreferences first.') return", "importModelNode(model, path): # Extract the name of this model from", "= os.path.splitext(os.path.basename(path))[0] # Import skeleton for binds, materials for meshes", "tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4() matTranslation = Matrix.Translation(Vector(bone.LocalPosition())) matrices[bone.Name()] = matTranslation @", "def utilityAssignBSDFMaterialSlots(material, slots, path): material.node_tree.nodes.remove( material.node_tree.nodes[\"Principled BSDF\"]) shader = material.node_tree.nodes.new(\"ShaderNodeGroup\")", "= shader_path inner_path = 'NodeTree' object_name = 'S/G-Blender' bpy.ops.wm.append( filepath=os.path.join(file_path,", ") except: self.report({'ERROR'}, 'Set the Shader path in AddonPreferences first.')", "matrices[bone.Name()] = matTranslation @ matRotation handles[i] = newBone for i,", "skeletonObj.pose.bones: bone.matrix_basis.identity() bone.matrix = matrices[bone.name] bpy.ops.pose.armature_apply() return skeletonObj def importMaterialNode(path,", "load_image from .cast import Cast, Model, Animation, Curve, NotificationTrack, Mesh,", "vertexIndex = faces[faceStart + 
faceLookupMap[x]] if vertexNormals is not None:", "skeleton is None: return None armature = bpy.data.armatures.new(\"Joints\") armature.display_type =", "faceLookupMap[1]]], blendMesh.verts[faces[faceStart + faceLookupMap[2]]]] try: newLoop = blendMesh.faces.new(indices) except ValueError:", "faceLookupMap[0]]], blendMesh.verts[faces[faceStart + faceLookupMap[1]]], blendMesh.verts[faces[faceStart + faceLookupMap[2]]]] try: newLoop =", "\"Diffuse map\", \"diffuse\": \"Diffuse map\", \"specular\": \"Specular map\", \"ao\": \"AO", "loop[vertexColorLayer] = [ (vertexColors[vertexIndex] >> i & 0xff) / 255.0", "True def importRootNode(node, path): for child in node.ChildrenOfType(Model): importModelNode(child, path)", "inner_path, object_name), directory=os.path.join(file_path, inner_path), filename=object_name ) except: self.report({'ERROR'}, 'Set the", "importRootNode(root, path) def load(self, context, filepath=\"\"): # シェーダーをアペンド shader_path =", "handles[i].parent = handles[bone.ParentIndex()] bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='POSE') for bone in", "binds, materials for meshes skeletonObj = importSkeletonNode(modelName, model.Skeleton()) materialArray =", "in slots: connection = slots[slot] if not connection.__class__ is File:", "# Loop and connect the slots for slot in slots:", "@ matRotation handles[i] = newBone for i, bone in enumerate(bones):", "material.Name(), materialNew materialNew = bpy.data.materials.new(name=material.Name()) materialNew.use_nodes = True # Blender", "the path modelName = os.path.splitext(os.path.basename(path))[0] # Import skeleton for binds,", "materials for meshes skeletonObj = importSkeletonNode(modelName, model.Skeleton()) materialArray = {key:", "materialNew = bpy.data.materials.new(name=material.Name()) materialNew.use_nodes = True # Blender really only", "name of this model from the path modelName = os.path.splitext(os.path.basename(path))[0]", "+ faceLookupMap[x]] if vertexNormals is 
not None: vertexNormalLayer.append((vertexNormals[vertexIndex * 3],", "bpy.data.materials.new(name=material.Name()) materialNew.use_nodes = True # Blender really only wants a", "material.node_tree.links.new( shader.inputs[switcher[slot]], texture.outputs[\"Color\"]) material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # texture.outputのカラーとinputをつなげる(link) else: material.node_tree.nodes.remove(texture) def", "+ 1])) uv.y = 1.0 - uv.y loop[vertexUVLayers[uvLayer]].uv = uv", "def vertexToFaceVertex(face): for x, loop in enumerate(face.loops): vertexIndex = faces[faceStart", "= [ (vertexColors[vertexIndex] >> i & 0xff) / 255.0 for", "armature = bpy.data.armatures.new(\"Joints\") armature.display_type = \"STICK\" skeletonObj = bpy.data.objects.new(name, armature)", "= meshObj.modifiers.new('Armature Rig', 'ARMATURE') modifier.object = skeletonObj modifier.use_bone_envelopes = False", "= blendMesh.faces.new(indices) except ValueError: continue else: vertexToFaceVertex(newLoop) maximumInfluence = mesh.MaximumWeightInfluence()", "materialNew.use_nodes = True # Blender really only wants a BSDF", "bpy.ops.object.mode_set(mode='EDIT') bones = skeleton.Bones() handles = [None] * len(bones) matrices", "import load_image from .cast import Cast, Model, Animation, Curve, NotificationTrack,", "texture.image = bpy.data.images.load( utilityBuildPath(path, connection.Path())) #画像を読み込み except RuntimeError: pass if", "ValueError: continue else: vertexToFaceVertex(newLoop) maximumInfluence = mesh.MaximumWeightInfluence() if maximumInfluence >", "1.0 - uv.y loop[vertexUVLayers[uvLayer]].uv = uv if vertexColors is not", "] = weightValueBuffer[x * maximumInfluence] blendMesh.to_mesh(newMesh) newMesh.create_normals_split() if len(vertexNormalLayer) >", "asset) def utilityAssignBSDFMaterialSlots(material, slots, path): material.node_tree.nodes.remove( material.node_tree.nodes[\"Principled BSDF\"]) shader =", "BSDF\"]) shader = material.node_tree.nodes.new(\"ShaderNodeGroup\") 
output = material.node_tree.nodes['Material Output'] # グループシェーダーを作成", "material.node_tree.nodes.new(\"ShaderNodeGroup\") output = material.node_tree.nodes['Material Output'] # グループシェーダーを作成 shader.node_tree = bpy.data.node_groups['S/G-Blender']", "import bpy import bmesh import os import array import math", "and connect the slots for slot in slots: connection =", "Mesh, Skeleton, Bone, Material, File def utilityBuildPath(root, asset): if os.path.isabs(asset):", "armature.display_type = \"STICK\" skeletonObj = bpy.data.objects.new(name, armature) skeletonObj.show_in_front = True", "= [None] * len(bones) matrices = {} for i, bone", "Parse and load cast nodes importCast(filepath) # Update the scene,", "vertexPositions[x + 2]))) blendMesh.verts.ensure_lookup_table() faceLookupMap = [1, 2, 0] vertexNormalLayer", "( importMaterialNode(path, x) for x in model.Materials())} meshes = model.Meshes()", "x in range(mesh.UVLayerCount())] def vertexToFaceVertex(face): for x, loop in enumerate(face.loops):", "return skeletonObj def importMaterialNode(path, material): # If you already created", "newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3))) newMesh.use_auto_smooth = True meshObj = bpy.data.objects.new(\"CastMesh\", newMesh)", "cast.Roots(): importRootNode(root, path) def load(self, context, filepath=\"\"): # シェーダーをアペンド shader_path", "for x in range(0, len(vertexPositions), 3): blendMesh.verts.new( Vector((vertexPositions[x], vertexPositions[x +", "True meshObj = bpy.data.objects.new(\"CastMesh\", newMesh) bpy.context.view_layer.active_layer_collection.collection.objects.link( meshObj) bpy.context.view_layer.objects.active = meshObj", "continue else: vertexToFaceVertex(newLoop) maximumInfluence = mesh.MaximumWeightInfluence() if maximumInfluence > 0:", "from bpy_extras.image_utils import load_image from .cast import Cast, Model, Animation,", "material.node_tree.nodes['Material Output'] # グループシェーダーを作成 shader.node_tree = bpy.data.node_groups['S/G-Blender'] 
#テクスチャを定義 switcher =", "map\", \"emissive\": \"Emission input\" } # Loop and connect the", "if len(vertexNormalLayer) > 0: for x, _loop in enumerate(newMesh.loops): newMesh.loops[x].normal", "materialNew materialNew = bpy.data.materials.new(name=material.Name()) materialNew.use_nodes = True # Blender really", "* 2) + 1])) uv.y = 1.0 - uv.y loop[vertexUVLayers[uvLayer]].uv", "enumerate(face.loops): vertexIndex = faces[faceStart + faceLookupMap[x]] if vertexNormals is not", "return None armature = bpy.data.armatures.new(\"Joints\") armature.display_type = \"STICK\" skeletonObj =", "map\", \"cavity\": \"Cavity map\", \"gloss\": \"Glossiness map\", \"normal\": \"Normal map\",", "if maximumInfluence > 0: weightBoneBuffer = mesh.VertexWeightBoneBuffer() weightValueBuffer = mesh.VertexWeightValueBuffer()", "len(bones) matrices = {} for i, bone in enumerate(bones): newBone", ".cast import Cast, Model, Animation, Curve, NotificationTrack, Mesh, Skeleton, Bone,", "maximumInfluence] > 0.0): vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence] ] = weightValueBuffer[x *", "= bmesh.new() vertexColorLayer = blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer = blendMesh.verts.layers.deform.new() vertexUVLayers =", "i in (24, 16, 8, 0)] faces = mesh.FaceBuffer() for", "= 'S/G-Blender' bpy.ops.wm.append( filepath=os.path.join(file_path, inner_path, object_name), directory=os.path.join(file_path, inner_path), filename=object_name )", "slot in switcher: continue texture = material.node_tree.nodes.new(\"ShaderNodeTexImage\") #画像ノードを作成 try: texture.image", "meshObj = bpy.data.objects.new(\"CastMesh\", newMesh) bpy.context.view_layer.active_layer_collection.collection.objects.link( meshObj) bpy.context.view_layer.objects.active = meshObj meshMaterial", "meshObj) bpy.context.view_layer.objects.active = meshObj meshMaterial = mesh.Material() if meshMaterial is", "for binds, materials for meshes skeletonObj = importSkeletonNode(modelName, model.Skeleton()) 
materialArray", "mesh.FaceBuffer() for faceStart in range(0, len(faces), 3): indices = [blendMesh.verts[faces[faceStart", "shader_path = bpy.context.preferences.addons[__package__].preferences.apex_sgshader_path try: file_path = shader_path inner_path = 'NodeTree'", "% x) for x in range(mesh.UVLayerCount())] vertexPositions = mesh.VertexPositionBuffer() for", "= blendMesh.verts.layers.deform.new() vertexUVLayers = [blendMesh.loops.layers.uv.new( \"map%d\" % x) for x", "for bone in skeletonObj.pose.bones: bone.matrix_basis.identity() bone.matrix = matrices[bone.name] bpy.ops.pose.armature_apply() return", "= True # Blender really only wants a BSDF shader", "material.node_tree.nodes.new(\"ShaderNodeTexImage\") #画像ノードを作成 try: texture.image = bpy.data.images.load( utilityBuildPath(path, connection.Path())) #画像を読み込み except", "\"normal\": \"Normal map\", \"emissive\": \"Emission input\" } # Loop and", "= array.array('f', [0.0] * (len(newMesh.loops) * 3)) newMesh.loops.foreach_get(\"normal\", clnors) newMesh.polygons.foreach_set(", "vertexPositions = mesh.VertexPositionBuffer() for x in range(0, len(vertexPositions), 3): blendMesh.verts.new(", "+ faceLookupMap[2]]]] try: newLoop = blendMesh.faces.new(indices) except ValueError: continue else:", "path) def importCast(path): cast = Cast() cast.load(path) for root in", "path in AddonPreferences first.') return False # Parse and load", "sucks, WXYZ? 
=> XYZW master race matRotation = Quaternion( (tempQuat[3],", "vertexColorLayer = blendMesh.loops.layers.color.new(\"color1\") vertexWeightLayer = blendMesh.verts.layers.deform.new() vertexUVLayers = [blendMesh.loops.layers.uv.new( \"map%d\"", "def utilityBuildPath(root, asset): if os.path.isabs(asset): return asset root = os.path.dirname(root)", "skeletonObj bpy.ops.object.mode_set(mode='POSE') for bone in skeletonObj.pose.bones: bone.matrix_basis.identity() bone.matrix = matrices[bone.name]", "the Shader path in AddonPreferences first.') return False # Parse", "skeleton): if skeleton is None: return None armature = bpy.data.armatures.new(\"Joints\")", "uv if vertexColors is not None: loop[vertexColorLayer] = [ (vertexColors[vertexIndex]", "shader.inputs[switcher[slot]], texture.outputs[\"Color\"]) material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # texture.outputのカラーとinputをつなげる(link) else: material.node_tree.nodes.remove(texture) def importSkeletonNode(name,", "if not slot in switcher: continue texture = material.node_tree.nodes.new(\"ShaderNodeTexImage\") #画像ノードを作成", "blendMesh.verts[faces[faceStart + faceLookupMap[1]]], blendMesh.verts[faces[faceStart + faceLookupMap[2]]]] try: newLoop = blendMesh.faces.new(indices)", "(24, 16, 8, 0)] faces = mesh.FaceBuffer() for faceStart in", "for x in range(mesh.UVLayerCount())] def vertexToFaceVertex(face): for x, loop in", "= matrices[bone.name] bpy.ops.pose.armature_apply() return skeletonObj def importMaterialNode(path, material): # If", "# If you already created the material, ignore this materialNew", "node.ChildrenOfType(Animation): # importAnimationNode(child, path) def importCast(path): cast = Cast() cast.load(path)", "mesh.VertexColorBuffer() vertexUVs = [mesh.VertexUVLayerBuffer( x) for x in range(mesh.UVLayerCount())] def", "import * from bpy_extras.image_utils import load_image from .cast import Cast,", "ignore this materialNew = bpy.data.materials.get(material.Name()) if materialNew is not 
None:", "# The Original importer was created by Nick # Copyright", "= vertexNormalLayer[x] newMesh.validate(clean_customdata=False) clnors = array.array('f', [0.0] * (len(newMesh.loops) *", "= Cast() cast.load(path) for root in cast.Roots(): importRootNode(root, path) def", "slots[slot] if not connection.__class__ is File: continue if not slot", "input\" } # Loop and connect the slots for slot", "for x in model.Materials())} meshes = model.Meshes() for mesh in", "Skeleton, Bone, Material, File def utilityBuildPath(root, asset): if os.path.isabs(asset): return", "wants a BSDF shader node # so we're gonna give", "(vertexUVs[uvLayer][vertexIndex * 2], vertexUVs[uvLayer][(vertexIndex * 2) + 1])) uv.y =", "x in range(mesh.UVLayerCount())] vertexPositions = mesh.VertexPositionBuffer() for x in range(0,", "{ \"albedo\": \"Diffuse map\", \"diffuse\": \"Diffuse map\", \"specular\": \"Specular map\",", "for x, vert in enumerate(blendMesh.verts): if (weightValueBuffer[x * maximumInfluence] >", "continue texture = material.node_tree.nodes.new(\"ShaderNodeTexImage\") #画像ノードを作成 try: texture.image = bpy.data.images.load( utilityBuildPath(path,", "in skeletonObj.pose.bones: bone.matrix_basis.identity() bone.matrix = matrices[bone.name] bpy.ops.pose.armature_apply() return skeletonObj def", "the name of this model from the path modelName =", "Matrix.Translation(Vector(bone.LocalPosition())) matrices[bone.Name()] = matTranslation @ matRotation handles[i] = newBone for", "meshObj.vertex_groups.new(name=bone.name) meshObj.parent = skeletonObj modifier = meshObj.modifiers.new('Armature Rig', 'ARMATURE') modifier.object", "matRotation handles[i] = newBone for i, bone in enumerate(bones): if", "in enumerate(bones): if bone.ParentIndex() > -1: handles[i].parent = handles[bone.ParentIndex()] bpy.context.view_layer.objects.active", "mesh.VertexNormalBuffer() vertexColors = mesh.VertexColorBuffer() vertexUVs = [mesh.VertexUVLayerBuffer( x) for x", "importModelNode(child, path) # for child in 
node.ChildrenOfType(Animation): # importAnimationNode(child, path)", "skeleton for binds, materials for meshes skeletonObj = importSkeletonNode(modelName, model.Skeleton())", "newMesh.create_normals_split() if len(vertexNormalLayer) > 0: for x, _loop in enumerate(newMesh.loops):", "None: loop[vertexColorLayer] = [ (vertexColors[vertexIndex] >> i & 0xff) /", "vertexNormals is not None: vertexNormalLayer.append((vertexNormals[vertexIndex * 3], vertexNormals[( vertexIndex *", "= skeletonObj modifier.use_bone_envelopes = False modifier.use_vertex_groups = True def importRootNode(node,", "= Quaternion( (tempQuat[3], tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4() matTranslation = Matrix.Translation(Vector(bone.LocalPosition())) matrices[bone.Name()]", "modifier.object = skeletonObj modifier.use_bone_envelopes = False modifier.use_vertex_groups = True def", "if (weightValueBuffer[x * maximumInfluence] > 0.0): vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence] ]", "= mesh.VertexWeightBoneBuffer() weightValueBuffer = mesh.VertexWeightValueBuffer() for x, vert in enumerate(blendMesh.verts):", "[0.0] * (len(newMesh.loops) * 3)) newMesh.loops.foreach_get(\"normal\", clnors) newMesh.polygons.foreach_set( \"use_smooth\", [True]", "XYZW master race matRotation = Quaternion( (tempQuat[3], tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4()", "bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='POSE') for bone in skeletonObj.pose.bones: bone.matrix_basis.identity() bone.matrix", "* 3) + 1], vertexNormals[(vertexIndex * 3) + 2])) for", "- uv.y loop[vertexUVLayers[uvLayer]].uv = uv if vertexColors is not None:", "except ValueError: continue else: vertexToFaceVertex(newLoop) maximumInfluence = mesh.MaximumWeightInfluence() if maximumInfluence", "for slot in slots: connection = slots[slot] if not connection.__class__", "bpy.context.view_layer.objects.active = skeletonObj bpy.ops.object.mode_set(mode='EDIT') bones = 
skeleton.Bones() handles = [None]", "Nick # Copyright (c) 2020 Nick import bpy import bmesh", "for bone in skeletonObj.pose.bones: meshObj.vertex_groups.new(name=bone.name) meshObj.parent = skeletonObj modifier =", "import math from mathutils import * from bpy_extras.image_utils import load_image", "if vertexNormals is not None: vertexNormalLayer.append((vertexNormals[vertexIndex * 3], vertexNormals[( vertexIndex", "Output'] # グループシェーダーを作成 shader.node_tree = bpy.data.node_groups['S/G-Blender'] #テクスチャを定義 switcher = {", "return material.Name(), materialNew def importModelNode(model, path): # Extract the name", "material.Name(), materialNew def importModelNode(model, path): # Extract the name of", "Import skeleton for binds, materials for meshes skeletonObj = importSkeletonNode(modelName,", "material.Slots(), path) return material.Name(), materialNew def importModelNode(model, path): # Extract", "vertexNormals[(vertexIndex * 3) + 2])) for uvLayer in range(mesh.UVLayerCount()): uv", "material, ignore this materialNew = bpy.data.materials.get(material.Name()) if materialNew is not", "modifier.use_bone_envelopes = False modifier.use_vertex_groups = True def importRootNode(node, path): for", "of this model from the path modelName = os.path.splitext(os.path.basename(path))[0] #", "for x, loop in enumerate(face.loops): vertexIndex = faces[faceStart + faceLookupMap[x]]", "switcher = { \"albedo\": \"Diffuse map\", \"diffuse\": \"Diffuse map\", \"specular\":", "is not None: meshObj.data.materials.append(materialArray[meshMaterial.Name()]) for bone in skeletonObj.pose.bones: meshObj.vertex_groups.new(name=bone.name) meshObj.parent", "Quaternion( (tempQuat[3], tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4() matTranslation = Matrix.Translation(Vector(bone.LocalPosition())) matrices[bone.Name()] =", "armature) skeletonObj.show_in_front = True bpy.context.view_layer.active_layer_collection.collection.objects.link( skeletonObj) bpy.context.view_layer.objects.active = 
skeletonObj bpy.ops.object.mode_set(mode='EDIT')", "I am sorry but blender sucks tempQuat = bone.LocalRotation() #" ]
[ "] operations = [ migrations.AlterField( model_name='student', name='bio', field=models.CharField(blank=True, max_length=200), ),", "Django 3.0.2 on 2020-02-24 23:16 from django.db import migrations, models", "class Migration(migrations.Migration): dependencies = [ ('study', '0002_student'), ] operations =", "Migration(migrations.Migration): dependencies = [ ('study', '0002_student'), ] operations = [", "23:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "'0002_student'), ] operations = [ migrations.AlterField( model_name='student', name='bio', field=models.CharField(blank=True, max_length=200),", "models class Migration(migrations.Migration): dependencies = [ ('study', '0002_student'), ] operations", "('study', '0002_student'), ] operations = [ migrations.AlterField( model_name='student', name='bio', field=models.CharField(blank=True,", "2020-02-24 23:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('study', '0002_student'),", "[ ('study', '0002_student'), ] operations = [ migrations.AlterField( model_name='student', name='bio',", "Generated by Django 3.0.2 on 2020-02-24 23:16 from django.db import", "3.0.2 on 2020-02-24 23:16 from django.db import migrations, models class", "by Django 3.0.2 on 2020-02-24 23:16 from django.db import migrations,", "on 2020-02-24 23:16 from django.db import migrations, models class Migration(migrations.Migration):", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('study',", "<gh_stars>0 # Generated by Django 3.0.2 on 2020-02-24 23:16 from", "operations = [ migrations.AlterField( model_name='student', name='bio', field=models.CharField(blank=True, max_length=200), ), ]", "migrations, models class 
Migration(migrations.Migration): dependencies = [ ('study', '0002_student'), ]", "= [ ('study', '0002_student'), ] operations = [ migrations.AlterField( model_name='student',", "# Generated by Django 3.0.2 on 2020-02-24 23:16 from django.db", "dependencies = [ ('study', '0002_student'), ] operations = [ migrations.AlterField(" ]
[ "\"a name\"} mock_patch.return_value = {}, {} headers = {\"Extra\": \"extra\"}", "= \"/rest/testuri?start=0&count=-1\" members = [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}]", "uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task,", "10: 50.322Z\"} ] task_with_output = self.task.copy() task_with_output['taskOutput'] = task_output mock_post.return_value", "resource_client = ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) def", "mock_patch): mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch('/rest/testuri/123', 'operation',", "\"&count=500\" \\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\ \"&query=name%20NE%20%27WrongName%27\" \\ \"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\": \"member\"}], result)", "2.0 (the \"License\"); # you may not use this file", "mock_get): self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) def test_get_utilization_with_empty(self): try: self.resource_client.get_utilization('')", "mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(custom_headers=self.custom_headers)", "mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\", timeout=60) mock_get_completed_task.assert_called_once_with(self.task,", "= {\"name\": \"resource1\", \"type\": \"resource\"} resource2 = 
{\"name\": \"resource2\", \"port\":", "50.322Z\"} ] task_with_output = self.task.copy() task_with_output['taskOutput'] = task_output mock_post.return_value =", "be returned but for pagination purposes, a nextPageUri is returned", "\"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri) mock_get.assert_called_once_with(uri) def test_get_with_uri_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for", "\"2\"}, {\"id\": \"3\"}] mock_get.return_value = { \"nextPageUri\": uri, \"members\": members,", "mock_delete.return_value = None, self.response_body filter = \"name='Exchange Server'\" result =", "uri } result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\")", "timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") def test_delete_should_return_true(self, mock_delete,", "self.assertEqual(self.response_body, self.resource_client.data) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def", "{} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch( '123a53cz', 'replace', '/name', 'new_name',", "class ResourceFileHandlerMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client =", "@mock.patch.object(connection, 'put') def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body result", "force=True, timeout=-1) mock_wait4task.assert_called_with(self.task, timeout=-1) self.assertEqual(self.task, delete_task) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task')", "dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def 
test_create_uri_with_force(self, mock_post): dict_to_create = {\"resource_name\":", "self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI + \"?filter=name%3Dname\") @mock.patch.object(connection, \"get\") def test_get_collection_with_path(self, mock_get): mock_get.return_value", "'post') def test_create_when_the_resource_is_a_list(self, mock_post): dict_to_create = [{\"resource_name\": \"a name\"}] mock_post.return_value", "schema methods\"\"\" class StubResource(Resource): \"\"\"Stub class to test resource common", "@mock.patch.object(connection, \"put\") def test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"}", "\"123\", \"resource_name\": \"a name\", } mock_post.return_value = self.task, {} mock_wait4task.return_value", "= None, self.response_body self.resource_client.update(dict_to_update) self.assertEqual(self.response_body, self.resource_client.data) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource,", "self.resource_client = StubResourceSchema(self.connection) super(ResourceSchemaMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"get\") def test_get_schema_uri(self, mock_get):", "\\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource,", "StubResourceFileHandler(self.connection) super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart(self, mock_post_multipart): uri =", "test_delete_with_custom_headers(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value =", "connection from hpOneView 
import exceptions from hpOneView.resources.resource import (ResourceClient, ResourceHelper,", "URI = \"/rest/testuri\" class BaseTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 =", "mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers)", "mock_wait4task.return_value = fake_associated_resurce result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection,", "'/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'post') def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value", "test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration',", "self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1) self.assertEqual(result, entity) @mock.patch.object(connection, 'patch')", "[{\"name\": \"testname\", \"uri\": \"/rest/testuri\"}] self.resource_client.data = {\"name\": \"testname\"} mock_do_get.return_value =", "mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\": None} result = self.resource_client.get_all()", "None, 'members': []} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'get')", "except ValueError as e: self.assertTrue(\"field\" in e.args[0]) else: self.fail() @mock.patch.object(connection,", "'endDate=2016-05-31T03:29:42.361Z'], refresh=True, view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z'", "{\"resource_name\": \"a name\", \"type\": self.TYPE_V200} 
self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict,", "self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection,", "= \"/rest/enclosures\" mock_put.return_value = self.task, self.task mock_wait4task.return_value = response_body result", "{\"task\": \"task\", \"uri\": \"\"} try: self.resource_client.delete(dict_to_delete, False, -1) except exceptions.HPOneViewUnknownType", "custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\": \"a", "self.task.copy() task_with_output['taskOutput'] = task_output mock_post.return_value = self.task, {} mock_get_completed_task.return_value =", "'6'}, {'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get):", "expected = {\"name\": \"test\", \"type\": \"anotherType\", \"uri\": uri} self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri,", "\"value\"}, {\"key\": \"value\"}]} collection = self.resource_client.get_collection('12345') self.assertEqual(len(collection), 2) @mock.patch.object(ResourceClient, 'get_all')", "'expected'}, {'name': 'not expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with_uri(self,", "= None, self.response_body self.connection._apiVersion = 200 expected_dict = {\"name\": \"test\",", "created_resource result = self.resource_client.create(dict_to_create, -1) self.assertEqual(result, created_resource) @mock.patch.object(connection, 'post') 
@mock.patch.object(TaskMonitor,", "else: self.fail() def test_get_with_none(self): try: self.resource_client.get(None) except ValueError as e:", "mock_get.assert_called_once_with(expected_uri) def test_get_utilization_with_empty(self): try: self.resource_client.get_utilization('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID,", "BaseTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 = \"typeV200\" TYPE_V300 = \"typeV300\"", "\"new_name\", }] mock_patch.return_value = {}, {} self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with(", "uri = \"/rest/testuri\" dict_to_update = {\"name\": \"test\"} expected = {\"name\":", "test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value", "custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_api_version_200(self, mock_post): dict_to_create = {\"resource_name\": \"a", "result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def", "resource patch operations\"\"\" class StubResourceUtilization(ResourceUtilizationMixin, Resource): \"\"\"Stub class to test", "mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_true_when_success(self, mock_open,", "def test_update_should_not_override_resource_properties(self, mock_put): dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"} uri", "= { \"resource_id\": \"123\", \"resource_name\": \"a name\", } mock_post.return_value =", "'123a53cz', 'replace', '/name', 'new_name', -1) 
mock_wait4task.assert_called_once_with(self.task, mock.ANY) def test_delete_with_none(self): try:", "\"/rest/testuri\", request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v200(self,", "mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v200(self, mock_patch):", "mock_wait4task.return_value = dict_to_update result = self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(result, dict_to_update) @mock.patch.object(connection,", "Enterprise Development LP # # Licensed under the Apache License,", "dict_to_create, custom_headers=None) def test_merge_api_default_values(self): resource = {'name': 'resource1'} default_values =", "uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task,", "dict_to_update = {\"name\": \"test\"} uri = \"/rest/testuri\" expected = {\"name\":", "\"Resource Name,\") @mock.patch.object(connection, \"get\") def test_get_by_uri(self, mock_get): self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection,", "StubResourceFileHandler(ResourceFileHandlerMixin, Resource): \"\"\"Stub class to test resource file operations\"\"\" class", "= {\"Content-Type\": \"application/json\", \"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info,", "{\"type\": \"type1\"} resource1 = {\"name\": \"resource1\"} resource2 = {\"name\": \"resource2\"}", "mock.Mock() uri = '/rest/testuri/' filepath = 
\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task,", "name\", \"type\": \"anotherType\"} self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI, expected, custom_headers=None) @mock.patch.object(connection, \"post\") def", "uri} self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def", "= StubResourceFileHandler(self.connection) super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart(self, mock_post_multipart): uri", "None, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_and_custom_headers(self,", "def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put): response_body = {\"resource_name\": \"name\"} mock_put.return_value =", "self.resource_client.data = {'uri': uri} expected = {\"name\": \"test\", \"type\": \"typeV300\",", "def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(\"/rest/\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0])", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_with_custom_headers(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body", "force=True) expected_uri = \"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def", "\"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") 
@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_patch_return_entity(self, mock_wait4task,", "uri) mock_wait4task.not_been_called() @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_return_associated_resource_when_response_is_task(self,", "mock_builtin from hpOneView.connection import connection from hpOneView import exceptions from", "result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri =", "mock_get_by.return_value = [{\"name\": \"value\"}] response = self.resource_client.get_by_name('Resource Name,') self.assertEqual(response, {\"name\":", "= {\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body", "test_extract_id_from_uri(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri)", "dict( resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8', path='sub', uri='/rest/testuri/7/sub/8'), dict( resource=None, subresource='/rest/testuri/9/sub/10', path='sub', uri='/rest/testuri/9/sub/10'),", "mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": uri}", "{\"resource_name\": \"a name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY,", "path='sub', uri='/rest/testuri/9/sub/10'), dict( resource='/rest/testuri/11', subresource='12', path='/sub/', uri='/rest/testuri/11/sub/12'), dict( resource='/rest/testuri/13', subresource=None,", "@mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"EXpected\"}, 
{\"name\":", "self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value = self.task, {} mock_get_completed_task.return_value =", "@mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v200(self, mock_patch): mock_patch.return_value = {}, {} self.connection._apiVersion", "ResourceClientTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 = 'typeV200' TYPE_V300 = 'typeV300'", "exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception was not", "{\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(connection,", "mock_delete.return_value = None, self.response_body self.resource_client.data = {\"uri\": \"/rest/testuri\"} result =", "self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) @mock.patch.object(connection, 'post_multipart_with_response_handling') def", "\"patch\") def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource): request_body = [{ \"op\": \"replace\",", "= extract_id_from_uri(uri) self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155') def test_extract_id_from_uri_unsupported(self): # This example is", "else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'post') def test_create_when_the_resource_is_a_list(self,", "test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0])", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value = self.task, {}", 
"mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_uri_with_force(self, mock_post): dict_to_create =", "def test_delete_dict_invalid_uri(self): dict_to_delete = {\"task\": \"task\", \"uri\": \"\"} try: self.resource_client.delete(dict_to_delete,", "self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v200(self, mock_patch):", "try: self.resource_client.update(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else:", "@mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput']", "License for the specific language governing permissions and # limitations", "{\"resource_name\": \"a name\", \"type\": \"anotherType\"} mock_post.return_value = {}, {} expected", "self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) class ResourceSchemaMixinTest(BaseTest): def setUp(self): self.connection", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language':", "\"a name\"} mock_post.return_value = {}, {} expected_dict = {\"resource_name\": \"a", "{} self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_without_default_values(self,", "Exception was not raised\") @mock.patch.object(connection, 'get') def test_get_utilization_with_args(self, mock_get): 
self.resource_client.get_utilization('09USE7335NW3',", "= {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection,", "self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI) @mock.patch.object(connection, \"get\") def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {}", "class ResourceZeroBodyMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client =", "mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers)", "(ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI,", "uri_list = ['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members':", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body", "dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"} uri = \"/rest/testuri\" mock_put.return_value", "subresource=\"/rest/testuri/9/sub/10\", path=\"sub\", uri=\"/rest/testuri/9/sub/10\"), dict( resource=\"/rest/testuri/11\", subresource=\"12\", path=\"/sub/\", uri=\"/rest/testuri/11/sub/12\"), dict( resource=\"/rest/testuri/13\",", "mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "= {}, {} expected = 
{\"resource_name\": \"a name\", \"type\": \"anotherType\"}", "'wait_for_task') def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri =", "name\"} mock_patch.return_value = {}, {} headers = {\"Extra\": \"extra\"} self.connection._apiVersion", "e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection,", "\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\"", "{\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\",", "test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task", "@mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\"", "\"test\", \"type\": \"anotherType\", \"uri\": uri} self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource,", "custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value", "entity) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers_with_content_type(self, mock_task,", "self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers) 
mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "test_merge_default_values(self): default_type = {\"type\": \"type1\"} resource1 = {\"name\": \"resource1\"} resource2", "task_output mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output result =", "= self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_given_uri(self,", "@mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = \"/rest/testuri/\" filepath =", "self.resource_client = ResourceClient(self.connection, self.URI) self.task = {\"task\": \"task\", \"taskState\": \"Finished\"}", "self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", {}, custom_headers=None)", "test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock() uri = \"/rest/testuri/\"", "test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client._helper.build_subresource_uri(None, \"123456\", \"sub-path\") except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED,", "def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest,", "subresource=\"2\", path=\"sub\", uri=\"/rest/testuri/1/sub/2\"), dict( resource=\"/rest/testuri/3\", 
subresource=\"4\", path=\"sub\", uri=\"/rest/testuri/3/sub/4\"), dict( resource=\"5\",", "a nextPageUri is returned by OneView. \"\"\" uri_list = [\"/rest/testuri?start=0&count=3\",", "@mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri =", "self.task mock_wait4task.return_value = entity result = self.resource_client.patch( '123a53cz', 'replace', '/name',", "= extract_id_from_uri(uri) self.assertEqual(extracted_id, '') def test_extract_id_from_uri_passing_id(self): uri = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id", "[\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"},", "= [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self,", "\"get\") def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\": None}", "self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by): self.resource_client.data = {\"name\":", "= {'nextPageUri': None, 'members': []} result = self.resource_client.get_all() self.assertEqual(result, [])", "mock_delete.assert_called_once_with(uri) @mock.patch.object(connection, 'delete') def test_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body", "\"get\") def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, 
mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock() uri", "unavailable_method) class FakeResource(object): def __init__(self, con): self._connection = con self._client", "{}, {} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY) @mock.patch.object(connection,", "mock_post_multipart): fake_associated_resurce = mock.Mock() uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "\"/rest/testuri?start=6&count=1\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\": \"2\"},", "\"/uri/test\"} mock_do_get.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource, \"get_by\") def", "mock_post_multipart): fake_response_body = mock.Mock() uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_return_entity(self, mock_wait4task, mock_put):", "def test_transform_list_to_dict(self): list = ['one', 'two', {'tree': 3}, 'four', 5]", "mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={}) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v300(self, mock_patch): request_body", "[{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls =", "TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, 
merge_default_values, unavailable_method) class StubResourceFileHandler(ResourceFileHandlerMixin,", "self.task, {} mock_wait4task.return_value = dict_to_update result = self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(result,", "test_get_with_none(self): try: self.resource_client.get(None) except ValueError as e: self.assertTrue(\"id\" in e.args[0])", "@mock.patch.object(connection, 'get') def test_get_utilization_by_uri_with_defaults(self, mock_get): self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri)", "\"/rest/resource/test\" mock_put.return_value = None, self.response_body self.connection._apiVersion = 200 expected_dict =", "\"resource2\", \"type\": \"resource\", \"port\": \"1\"} merged_resource = merge_resources(resource1, resource2) self.assertEqual(merged_resource,", "= 300 resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id',", "\"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch( '123a53cz',", "uri=\"/rest/testuri/7/sub/8\"), dict( resource=None, subresource=\"/rest/testuri/9/sub/10\", path=\"sub\", uri=\"/rest/testuri/9/sub/10\"), dict( resource=\"/rest/testuri/11\", subresource=\"12\", path=\"/sub/\",", "None) except ValueError as e: self.assertTrue(\"field\" in e.args[0]) else: self.fail()", "{\"id\": \"2\"}, {\"id\": \"3\"}] mock_get.return_value = { \"nextPageUri\": uri, \"members\":", "\"resource2\", \"port\": \"1\"} expected_resource = {\"name\": \"resource2\", \"type\": \"resource\", \"port\":", "= \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file = io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri, file_path)", "{'uri': uri} expected = 
{\"name\": \"test\", \"type\": \"typeV300\", \"uri\": uri}", "test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value", "mock_get_completed_task.return_value = task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, []) @mock.patch.object(connection, 'post')", "mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\")", "test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three']) mock_get.assert_called_once_with(self.URI", "@mock.patch.object(connection, 'post') def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "\"new_name\") mock_patch.assert_called_once_with( \"/rest/testuri\", request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\")", "update_task = self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, update_task) mock_update.assert_called_once_with(\"a_uri\", dict_to_update, custom_headers=None) @mock.patch.object(connection,", "mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY) @mock.patch.object(connection, 'post_multipart_with_response_handling')", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): uri = \"/rest/testuri\"", "self._client = ResourceClient(con, 
\"/rest/fake/resource\") def get_fake(self, uri): return self._client.get(uri) class", "e.args[0]) else: self.fail() def test_get_with_none(self): try: self.resource_client.get(None) except ValueError as", "@mock.patch(mock_builtin('open')) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri =", "= {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers)", "\"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(uri, dict_info,", "members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {\"nextPageUri\":", "= \"/rest/testuri\" TYPE_V200 = 'typeV200' TYPE_V300 = 'typeV300' DEFAULT_VALUES =", "\"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource): uri =", "\"test\"} uri = \"/rest/testuri\" expected = {\"name\": \"test\", \"uri\": uri,", "mock_wait4task, mock_post): mock_post.return_value = self.task, {} mock_wait4task.return_value = self.task self.resource_client.create({\"test\":", "mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = '/rest/testuri?start=0&count=-1' members", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource): request_body =", "def test_delete_by_id_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body 
mock_wait4task.return_value =", "self.connection._apiVersion = 200 expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V200}", "mock_get.return_value = {\"nextPageUri\": None, \"members\": []} result = self.resource_client.get_all() self.assertEqual(result,", "# -*- coding: utf-8 -*- ### # (C) Copyright [2019]", "self.assertEqual(result, dict_to_update) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post):", "{}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value',", "= self.URI + \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri) mock_get.assert_called_once_with(uri) def test_get_with_uri_with_incompatible_url_shoud_fail(self): message =", "'/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'},", "\"/rest/testuri\" mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_api_version_200(self, mock_post): dict_to_create", "test_update_with_force(self, mock_put, mock_laod_resource): dict_to_update = {\"name\": \"test\"} uri = \"/rest/testuri\"", "mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", {}, custom_headers=None) @mock.patch.object(connection,", "= {\"task\": \"task\", \"uri\": \"\"} try: self.resource_client.delete(dict_to_delete, False, -1) except", "Exception was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client.build_uri('/rest/') except exceptions.HPOneViewUnknownType", "def test_get_by_with_name_none(self): try: self.resource_client.get_by(None, None) 
except ValueError as e: self.assertTrue(\"field\"", "'patch') def test_patch_request_when_id_is_provided_v300(self, mock_patch): request_body = [{ 'op': 'replace', 'path':", "custom_headers=headers) mock_patch.assert_called_once_with( uri, dict_info, custom_headers={\"Extra\": \"extra\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\")", "expected = {\"resource_name\": \"a name\", \"type\": \"anotherType\"} self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI, expected,", "self.URI) resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body,", "not supported yet uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/otherthing' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id,", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY, mock.ANY) @mock.patch.object(connection,", "self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with(uri, request_body, custom_headers={}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\")", "'2'}, {'id': '3'}]}, {'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'},", "self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})", "@mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource): request_body = [{ \"op\":", "mock_post.return_value = self.task, {} mock_wait4task.return_value = self.task self.resource_client.create({\"test\": \"test\"}, 
timeout=60)", "[] actual_result = self.resource_client.ensure_resource_data(update_data=False) expected_result = None self.assertEqual(actual_result, expected_result) @mock.patch.object(connection,", "'get') def test_get_utilization_by_uri_with_defaults(self, mock_get): self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) def", "self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_different_resource_uri_should_fail(self, mock_get):", "extract_id_from_uri(uri) self.assertEqual(extracted_id, '') def test_extract_id_from_uri_passing_id(self): uri = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id =", "'/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() mock_wait4task.return_value =", "None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=15)", "= [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_should_return_all_items_when_response_paginated(self,", "expected, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\":", "self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_wait4task.assert_called_with(self.task, timeout=-1) self.assertEqual(self.task, delete_task) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor,", "= self.resource_client.ensure_resource_data(update_data=False) expected_result = None self.assertEqual(actual_result, expected_result) 
@mock.patch.object(connection, \"get\") def", "def test_delete_all_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value =", "{'id': '2'}, {'id': '3'}]}, {'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id':", "common methods\"\"\" URI = \"/rest/testuri\" class BaseTest(unittest.TestCase): URI = \"/rest/testuri\"", "\"anotherType\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri,", "\"not expected\"}] response = self.resource_client.get_by('connection.name', 'expected') self.assertEqual(response, [{'name': 'expected'}, {'name':", "OF ANY KIND, either express or implied. # See the", "300) self.resource_client = ResourceClient(self.connection, self.URI) self.task = {\"task\": \"task\", \"taskState\":", "'Resource Name,') @mock.patch.object(connection, 'get') def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\":", "{'name': 'resource1'} resource_client = ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource, default_values)", "See the License for the specific language governing permissions and", "self.assertEqual(actual_result, expected_result) @mock.patch.object(connection, \"get\") def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK", "\"wait_for_task\") def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): response_body = {\"resource_name\": \"name\"}", "[] response = self.resource_client.get_by_name('Resource Name,') self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(connection,", "custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_custom_headers(self, mock_put): dict_to_update = {\"name\": \"test\"}", "= self.resource_client.get_by('name', 'exPEcted') 
self.assertEqual(response, [{\"name\": \"EXpected\"}]) mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all')", "{\"key\": \"value\"}]} self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI) @mock.patch.object(connection, \"get\") def test_get_collection_with_filter(self, mock_get): mock_get.return_value", "{} self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\", \"name=three\"]) mock_get.assert_called_once_with(self.URI + \"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, \"get\") def", "-*- coding: utf-8 -*- ### # (C) Copyright [2019] Hewlett", "= \"/rest/enclosures\" self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\")", "mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {} self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers)", "{\"name\": \"test\", \"type\": self.TYPE_V300, \"uri\": uri} self.resource_client._merge_default_values() self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected_dict,", "self.assertEqual(self.task, self.resource_client.data) mock_update.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor,", "\"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) expected_uri = \"/rest/testuri\" mock_put.assert_called_once_with(expected_uri,", "= '/rest/testuri/09USE7335NW3' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def 
test_build_uri_with_none_should_raise_exception(self): try:", "option in options: uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path']) self.assertEqual(uri, option['uri'])", "to in writing, software # distributed under the License is", "\"name\"} mock_put.return_value = self.task, self.task mock_wait4task.return_value = response_body result =", "refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\", "= ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) @mock.patch.object(connection,", "super(ResourceTest, self).setUp(self.resource_client) self.resource_helper = ResourceHelper(self.URI, self.connection, None) @mock.patch.object(ResourceHelper, \"do_put\") @mock.patch.object(Resource,", "timeout=-1) self.assertTrue(result) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_force(self,", "timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "def test_update_uri(self, mock_wait4task, mock_update): dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": \"a_uri\"}", "Server'\" delete_task = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_wait4task.assert_called_with(self.task, timeout=-1) self.assertEqual(self.task, delete_task)", "None, self.response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection,", "{ \"resource_id\": \"123\", 
\"resource_name\": \"a name\", } mock_post.return_value = self.task,", "mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\")", "con self._client = ResourceClient(con, \"/rest/fake/resource\") def get_fake(self, uri): return self._client.get(uri)", "query = \"name NE 'WrongName'\" view = '\"{view-name}\"' scope_uris =", "'wait_for_task') def test_delete_with_custom_headers(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value", "or agreed to in writing, software # distributed under the", "custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\": \"a", "extract_id_from_uri, merge_resources, merge_default_values, unavailable_method) class StubResourceFileHandler(ResourceFileHandlerMixin, Resource): \"\"\"Stub class to", "test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK \\\"Test FC Network'\" sort =", "uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try:", "= [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}] mock_get.return_value = {", "= self.resource_client.get_all(count=15) expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'},", "'post') def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "{\"uri\": \"/uri/test\"} mock_do_get.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource, \"get_by\")", "test_upload_should_wait_for_task_when_response_is_task(self, 
mock_get, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( '/rest/testuri/id', dict_info, custom_headers={'Extra': 'extra',", "mock_wait4task.return_value = self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None)", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_uri(self, mock_wait4task, mock_update,", "= '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() mock_wait4task.return_value", "connection('127.0.0.1', 300) self.resource_client = StubResourceSchema(self.connection) super(ResourceSchemaMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"get\") def", "mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, \"download_to_stream\")", "test_delete_with_custom_headers(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task", "-1) self.assertEqual(result, entity) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers_with_content_type(self, mock_task,", "\"name\"} self.resource_client.URI = \"/rest/enclosures\" mock_put.return_value = self.task, self.task mock_wait4task.return_value =", "not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client.build_uri('/rest/') except 
exceptions.HPOneViewUnknownType as exception:", "\"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\", "\"7\"}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In", "mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three']) mock_get.assert_called_once_with(self.URI +", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body", "\"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = False mock_open.return_value = io.StringIO()", "[{\"member\": \"member\"}]} result = self.resource_client.get_all( 1, 500, filter, query, sort,", "try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail('Expected", "\"a name\"} mock_patch.return_value = {}, {} headers = {'Content-Type': 'application/json',", "request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v200(self, mock_patch): mock_patch.return_value =", "= \"/rest/testuri\" mock_put.return_value = None, self.response_body expected_dict = {\"name\": \"test\",", "custom_headers={'Extra': 'extra', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_patch(self,", "compliance with the License. 
# You may obtain a copy", "= {\"uri\": \"/rest/test\"} self.resource_client.update(data={\"name\": \"test\"}) mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once() def test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try:", "= self.task filter = \"name='Exchange Server'\" delete_task = self.resource_client.delete_all(filter=filter, force=True,", "= [{ 'op': 'replace', 'path': '/name', 'value': 'new_name', }] mock_patch.return_value", "def test_get_with_none(self): try: self.resource_client.get(None) except ValueError as e: self.assertTrue(\"id\" in", "\"a name\"} mock_post.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client._merge_default_values()", "{'name': 'resource1'} default_values = { '200': {\"type\": \"EnclosureGroupV200\"}, '300': {\"type\":", "= self.resource_helper.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor,", "ResourceHelper(self.URI, self.connection, None) @mock.patch.object(ResourceHelper, \"do_put\") @mock.patch.object(Resource, \"ensure_resource_data\") def test_ensure_resource_should_call_once(self, mock_do_put,", "ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) def test_merge_api_default_values(self): resource", "'wait_for_task') def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value", "'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) 
@mock.patch.object(connection, 'patch')", "resource='1', subresource='2', path='sub', uri='/rest/testuri/1/sub/2'), dict( resource='/rest/testuri/3', subresource='4', path='sub', uri='/rest/testuri/3/sub/4'), dict(", "mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri(self,", "None, self.response_body self.resource_client.update(dict_to_update, uri=uri, force=True) expected_uri = \"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri, dict_to_update,", "+ \"/12345\") @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_with_result(self, mock_get_by): mock_get_by.return_value = [{\"name\":", "TYPE_V300} } def setUp(self, resource_client=None): self.resource_client = resource_client self.resource_client.URI =", "\"member\"}]} result = self.resource_client.get_all( 1, 500, filter, query, sort, view,", "self.custom_headers = {\"Accept-Language\": \"en_US\"} class ResourceFileHandlerMixinTest(BaseTest): def setUp(self): self.connection =", "def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): response_body = {\"resource_name\": \"name\"} self.resource_client.URI", "except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception was", "self.response_body filter = \"name='Exchange Server'\" result = self.resource_helper.delete_all(filter=filter, force=True, timeout=-1)", "{\"resource_name\": \"a name\"} mock_patch.return_value = {}, {} headers = {'Content-Type':", "mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY, mock.ANY) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") 
@mock.patch.object(connection,", "'MyFibreNetwork', uri='/rest/other/5435534/sub') except exceptions.HPOneViewUnknownType as e: self.assertEqual('Unrecognized URI for this", "self.task, self.task mock_wait4task.return_value = entity result = self.resource_client.patch( '123a53cz', 'replace',", "hpOneView import exceptions from hpOneView.resources.resource import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin,", "class BaseTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 = \"typeV200\" TYPE_V300 =", "new_resource = self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor,", "mock_get): mock_get.return_value = {'nextPageUri': None, 'members': []} result = self.resource_client.get_all()", "self.response_body mock_wait4task.return_value = self.task filter = \"name='Exchange Server'\" delete_task =", "ValueError as e: self.assertTrue(\"id\" in e.args[0]) else: self.fail() def test_create_with_none(self):", "test_get_by_id_with_result(self, mock_get): self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_by_id_without_result(self, mock_get): mock_get.return_value", "'get') def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {'nextPageUri': None, 'members': None}", "subresource='2', path='sub', uri='/rest/testuri/1/sub/2'), dict( resource='/rest/testuri/3', subresource='4', path='sub', uri='/rest/testuri/3/sub/4'), dict( resource='5',", "not use this file except in compliance with the License.", "custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_force(self, mock_put, mock_laod_resource):", 
"self.assertEqual(\"Unknown object type\", e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def test_get_schema_uri(self,", "expected = {'name': 'resource1', \"type\": \"EnclosureGroupV300\"} resource_client = ResourceClient(self.connection, self.URI)", "\"body\"} self.custom_headers = {'Accept-Language': 'en_US'} @mock.patch.object(connection, 'get') def test_get_all_called_once(self, mock_get):", "self.resource_client.get_collection('12345', 'name=name') mock_get.assert_called_once_with(self.URI + \"/12345?filter=name%3Dname\") @mock.patch.object(connection, 'get') def test_get_collection_with_multiple_filters(self, mock_get):", "uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock()", "\"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file = io.StringIO() mock_open.return_value = fake_file", "= [{'id': '1'}, {'id': '2'}, {'id': '3'}] mock_get.return_value = {", "filter = \"name='Exchange Server'\" delete_task = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_wait4task.assert_called_with(self.task,", "'{resource_uri}?start=1' \\ '&count=500' \\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\ '&query=name%20NE%20%27WrongName%27' \\ '&sort=name%3Aascending' \\", "else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client._helper.build_uri('')", "e.args[0]) else: self.fail() def test_get_collection_with_none(self): try: self.resource_client.get_collection(None) except ValueError as", "uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO() result", "you may not use this file except in compliance with", "= self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, task_output) 
@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_empty_list_when_output_is_empty(self,", "= \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath,", "def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response = self.resource_client.get_by_name(\"Resource Name,\")", "Network'\" sort = 'name:ascending' query = \"name NE 'WrongName'\" view", "def test_build_subresource_uri(self): options = [ dict( resource=\"1\", subresource=\"2\", path=\"sub\", uri=\"/rest/testuri/1/sub/2\"),", "= \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path)", "mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY, mock.ANY) @mock.patch.object(connection, \"post_multipart_with_response_handling\")", "def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri = \"/rest/testuri/\"", "call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list", "def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest,", "\"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\")", "\"uri\": \"a_uri\"} mock_update.return_value = 
self.task, self.response_body mock_wait4task.return_value = self.task update_task", "\"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {'nextPageUri': None, \"members\": [{\"id\": \"7\"},", "self._connection = con self._client = ResourceClient(con, \"/rest/fake/resource\") def get_fake(self, uri):", "mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, \"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self,", "\"get\") def test_get_collection_with_path(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI +", "\"name=three\"]) mock_get.assert_called_once_with(self.URI + \"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, \"get\") def test_get_collection_should_return_list(self, mock_get): mock_get.return_value", "raised\") @mock.patch.object(connection, 'get') def test_get_utilization_with_args(self, mock_get): self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z', refresh=True,", "task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, []) @mock.patch.object(connection, 'post') def test_create_report_should_raise_exception_when_not_task(self,", "in e.args[0]) else: self.fail() @mock.patch.object(connection, 'delete') def test_delete_with_dict_uri(self, mock_delete): resource", "Server'\" uri = \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_delete.assert_called_once_with(uri) @mock.patch.object(connection, 'delete')", "dict_to_update = {\"name\": \"test\", \"type\": \"typeV300\"} 
self.resource_client.data = {'uri': uri}", "mock_get_all.return_value = [{\"name\": \"EXpected\"}, {\"name\": \"not expected\"}] response = self.resource_client.get_by('name',", "fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"], refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\"", "of results to be returned but for pagination purposes, a", "[{'id': '7'}]}] mock_get.side_effect = results result = self.resource_client.get_all(count=15) expected_items =", "'delete') def test_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body filter =", "\"/rest/testuri\"}] self.resource_client.data = {\"name\": \"testname\"} mock_do_get.return_value = get_by_return_value self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data,", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "[{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls =", "'WrongName'\" mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result = self.resource_helper.get_all( 1,", "\"/rest/enclosures/09USE133E5H4/configuration\", {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task,", "\"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" fake_resource = FakeResource(None) try: fake_resource.get_fake(uri) except exceptions.HPOneViewUnknownType as exception:", "subresource=None, path='/sub/', uri='/rest/testuri/13/sub'), ] for option in options: uri =", "{\"Accept-Language\": \"en_US\"} class ResourceFileHandlerMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300)", 
"test_upload_should_call_post_multipart(self, mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value =", "def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client.build_subresource_uri(None, \"123456\", 'sub-path') except exceptions.HPOneViewValueError as exception:", "mock_wait4task, mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration',", "'members': None} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor,", "\"created\": \"2015-03-24T15: 32: 50.889Z\"}, {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-13T14: 10: 50.322Z\"}", "self.resource_client._helper.build_uri(\"/rest/\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception", "'wait_for_task') def test_update_uri(self, mock_wait4task, mock_update): dict_to_update = {\"resource_data\": \"resource_data\", \"uri\":", "= self.task self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor,", "= ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers) @mock.patch.object(connection,", "test resource utilization methods\"\"\" class StubResourceSchema(ResourceSchemaMixin, Resource): \"\"\"Stub class to", "self.resource_client = resource_client self.resource_client.URI = self.URI 
self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES self.resource_client.data", "'get') def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\":", "self.task filter = \"name='Exchange Server'\" delete_task = self.resource_client.delete_all(filter=filter, force=True, timeout=-1)", "name\", } mock_post.return_value = self.task, {} mock_wait4task.return_value = created_resource result", "URI for this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" fake_resource = FakeResource(None)", "\"type\": \"resource\"} resource2 = {\"name\": \"resource2\", \"port\": \"1\"} expected_resource =", "Exception was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\") except", "= FakeResource(None) try: fake_resource.get_fake(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0])", "= \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY)", "file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def", "result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_no_members(self, mock_get):", "uri} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) self.assertEqual(self.response_body, self.resource_client.data) mock_put.assert_called_once_with(uri, expected,", "@mock.patch.object(connection, \"post_multipart_with_response_handling\") 
def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value =", "view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\"", "def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client._helper.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])", "mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"resource_name\": \"a name\",", "\"post\") def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this case,", "expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES)", "mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization()", "mock_patch.return_value = {}, {} headers = {\"Content-Type\": \"application/json\", \"Extra\": \"extra\"}", "ResourceTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResource(self.connection)", "= \"/rest/testuri\" mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def", "test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, 
mock_post): task_output = [ {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-24T15:", "\\ \"&count=500\" \\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\ \"&query=name%20NE%20%27WrongName%27\" \\ \"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\": \"member\"}],", "operations\"\"\" class StubResourceUtilization(ResourceUtilizationMixin, Resource): \"\"\"Stub class to test resource utilization", "self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.delete('1', custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'})", "\"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource):", "test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task", "test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {},", "= {\"name\": \"test\", \"type\": \"typeV300\", \"uri\": uri} mock_put.return_value = None,", "'/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath, uri)", "ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict,", "mock_put): dict_to_update = {\"name\": \"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update,", "= {\"resource_name\": \"name\"} self.resource_client.URI = \"/rest/enclosures\" mock_put.return_value = self.task, 
self.task", "test_create_uri_with_force(self, mock_post): dict_to_create = {\"resource_name\": \"a name\", \"force\": \"yes\"} mock_post.return_value", "resource=\"1\", subresource=\"2\", path=\"sub\", uri=\"/rest/testuri/1/sub/2\"), dict( resource=\"/rest/testuri/3\", subresource=\"4\", path=\"sub\", uri=\"/rest/testuri/3/sub/4\"), dict(", "'extra', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_patch(self, mock_wait4task,", "'get_completed_task') def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] =", "= \"name='Exchange Server'\" result = self.resource_helper.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(Resource,", "'delete') def test_helper_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body filter =", "uri='/rest/other/5435534/sub') except exceptions.HPOneViewUnknownType as e: self.assertEqual('Unrecognized URI for this resource',", "@mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers(self, mock_task, mock_patch): dict_info = {\"resource_name\": \"a", "@mock.patch.object(connection, 'get') def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK \\\"Test FC", "\"resource2\"} result_list = merge_default_values([resource1, resource2], default_type) expected_list = [ {\"name\":", "test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" request_body = [{ \"op\":", "mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) class ResourceSchemaMixinTest(BaseTest): def", "results = [{'nextPageUri': uri_list[1], 'members': [{'id': 
'1'}, {'id': '2'}, {'id':", "mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_autofix(self, mock_get_all):", "= self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") self.assertEqual(self.resource_client.data,", "extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155') def test_extract_id_from_uri_unsupported(self): # This example", "\"wait_for_task\") def test_delete_with_force(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task, self.response_body", "mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY,", "{\"type\": TYPE_V200}, \"300\": {\"type\": TYPE_V300} } def setUp(self, resource_client=None): self.resource_client", "= \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() mock_wait4task.return_value = fake_associated_resurce result", "@mock.patch.object(connection, \"put\") def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body self.resource_client.URI", "{\"name\": \"test\", \"type\": \"typeV300\", \"uri\": uri} mock_put.return_value = None, self.response_body", "self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_different_resource_uri_should_fail(self, mock_get):", "mock_wait4task.return_value = entity 
self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_wait4task.assert_called_once_with(self.task, mock.ANY) class ResourceUtilizationMixinTest(BaseTest):", "\"typeV300\", \"uri\": uri} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) self.assertEqual(self.response_body, self.resource_client.data)", "\"get_by\") def test_get_by_name_with_result(self, mock_get_by): self.resource_client.get_by_name(\"Resource Name,\") mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(Resource,", "\"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value = self.task,", "self.assertEqual(response, [{'name': 'expected'}, {'name': 'not expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all')", "= ['one', 'two', {'tree': 3}, 'four', 5] dict_transformed = transform_list_to_dict(list=list)", "@mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource): uri", "def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourcePatch(self.connection) super(ResourcePatchMixinTest,", "'wait_for_task') def test_patch_return_entity(self, mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value", "[{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}]}, {\"nextPageUri\": uri_list[2], \"members\": [{\"id\":", "{} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "\"/rest/testuri\" dict_to_update 
= {\"name\": \"test\", \"type\": \"typeV300\"} self.resource_client.data = {'uri':", "@mock.patch.object(connection, \"get\") def test_get_by_id_with_result(self, mock_get): self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def", "as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail('Expected Exception was not raised')", "custom_headers=None) @mock.patch.object(connection, 'post') def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\": \"a", "self.response_body resource_client = ResourceClient(self.connection, self.URI) resource_client.update(dict_to_update, uri=uri) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)", "by OneView. \"\"\" uri_list = ['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results =", "mock_get_by): mock_get_by.return_value = [] response = self.resource_client.get_by_name('Resource Name,') self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\",", "mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v300(self, mock_patch,", "self.task, {} mock_wait4task.return_value = self.task self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60)", "def test_patch_request_when_id_is_provided_v300(self, mock_patch): request_body = [{ 'op': 'replace', 'path': '/name',", "mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch( '123a53cz', 'replace',", "\"put\") def test_update_without_default_values(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update =", "\"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def 
test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource): uri =", "\"\"\"Stub class to test resource file operations\"\"\" class StubResourceZeroBody(ResourceZeroBodyMixin, Resource):", "call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list =", "mock_post.assert_called_once_with( '/rest/testuri', {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_and_custom_headers(self,", "mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath =", "= {\"name\": \"test\", \"type\": \"anotherType\"} uri = \"/rest/resource/test\" mock_put.return_value =", "{}, {} self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\",", "'member'}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri", "self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection, \"get\") def test_get_by_id_with_result(self, mock_get): self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection,", "[\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"},", "mock_wait4task, 
mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task delete_task", "\"updated name\"} mock_do_get.return_value = updated_data self.resource_client.refresh() self.assertEqual(self.resource_client.data, updated_data) @mock.patch.object(connection, \"post\")", "= self.task, {} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\") mock_post.assert_called_once_with(\"/rest/path/create-report\", {}) @mock.patch.object(connection,", "'6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect =", "= task_with_output self.resource_client.create_report(\"/rest/path/create-report\", timeout=60) mock_get_completed_task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task')", "try: self.resource_client.get_collection(None) except ValueError as e: self.assertTrue(\"id\" in e.args[0]) else:", "{}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post')", "io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, uri,", "3}, 'four', 5] dict_transformed = transform_list_to_dict(list=list) self.assertEqual(dict_transformed, {'5': True, 'four':", "{\"resource_name\": \"name\"} mock_put.return_value = self.task, self.task mock_wait4task.return_value = response_body result", "super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = \"/rest/testuri/\"", "\"wait_for_task\") def 
test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri =", "= \"/rest/testuri\" dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value = {},", "\"force\": \"yes\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1) expected_uri =", "self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI, expected, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_without_default_values(self, mock_post): dict_to_create", "{} self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three']) mock_get.assert_called_once_with(self.URI + \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, 'get')", "= self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'post') def test_create_with_zero_body_without_task(self,", "= response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection,", "mock_put.return_value = None, self.response_body expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300,", "{'Accept-Language': 'en_US'} @mock.patch.object(connection, 'get') def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK", "self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_default_api_version_300(self, mock_post):", "resource_client.update(dict_to_update, uri=uri) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 
'wait_for_task') def", "'one': True, 'tree': 3, 'two': True}) def test_extract_id_from_uri(self): uri =", "StubResourceSchema(self.connection) super(ResourceSchemaMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"get\") def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI", "mock_put.return_value = None, self.response_body self.connection._apiVersion = 200 expected_dict = {\"name\":", "test_get_by_id_without_result(self, mock_get): mock_get.return_value = [] response = self.resource_client.get_by_id(\"123\") self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\")", "mock_wait4task, mock_update): dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": \"a_uri\"} mock_update.return_value =", "\"/rest/enclosures\" mock_put.return_value = self.task, self.task mock_wait4task.return_value = response_body result =", "\"type\": \"anotherType\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update,", "mock_get.assert_called_once_with(self.URI + \"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, \"get\") def test_get_collection_should_return_list(self, mock_get): mock_get.return_value =", "test resource file operations\"\"\" class StubResourceZeroBody(ResourceZeroBodyMixin, Resource): \"\"\"Stub class to", "[{'id': '7'}]}] mock_get.side_effect = results result = self.resource_client.get_all() expected_items =", "try: self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else:", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, 
mock.ANY, custom_headers={\"Accept-Language\":", "= \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() mock_wait4task.return_value", "None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection, 'post_multipart_with_response_handling') def", "uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO() result", "self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'post') def test_create_when_the_resource_is_a_list(self, mock_post):", "[]) @mock.patch.object(ResourceHelper, \"do_get\") def test_refresh(self, mock_do_get): updated_data = {\"resource_name\": \"updated", "{} mock_get_completed_task.return_value = task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, []) @mock.patch.object(connection,", "uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task,", "\"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri =", "test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client._helper.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else:", "\"value\"}]} self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI) @mock.patch.object(connection, \"get\") def 
test_get_collection_with_filter(self, mock_get): mock_get.return_value =", "was not raised\") @mock.patch.object(connection, \"get\") def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list =", "custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\": \"a", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_id_should_work(self): input", "self.fail() @mock.patch.object(connection, 'get') def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\")", "mock_patch): request_body = [{ 'op': 'replace', 'path': '/name', 'value': 'new_name',", "custom_headers={'Accept-Language': 'en_US', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_patch_return_entity(self,", "self.fail('Expected Exception was not raised') @mock.patch.object(connection, 'get') def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get):", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI)", "'/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, fake_response_body result =", "\"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}]}]", "to be returned but for pagination purposes, a nextPageUri is", "self.response_body self.resource_client.update(dict_to_update) expected_uri = \"/rest/testuri\" mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\")", "= 
ResourceClient(self.connection, self.URI) resource_client.update(dict_to_update, uri=uri) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put')", "mock_get.assert_called_once_with(self.URI + \"/test\") @mock.patch.object(connection, \"get\") def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value =", "\\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_id_with_defaults(self, mock_get): self.resource_client.get_utilization('09USE7335NW3') expected_uri", "= {\"task\": \"task\", \"taskState\": \"Finished\"} self.response_body = {\"body\": \"body\"} self.custom_headers", "\"wait_for_task\") def test_delete_with_custom_headers(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task, self.response_body", "was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client.build_uri('/rest/') except exceptions.HPOneViewUnknownType as", "@mock.patch.object(connection, \"post\") def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "'/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO() result = self.resource_client.download(uri,", "{\"name\": \"resource1\"} resource2 = {\"name\": \"resource2\"} result_list = merge_default_values([resource1, resource2],", "timeout=-1) self.assertEqual(self.task, delete_task) mock_delete.assert_called_once_with(self.URI + \"/1?force=True\", custom_headers=None) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor,", "\"type\": \"anotherType\"} self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI, expected, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_without_default_values(self,", 
"@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource): uri =", "= self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\")", "super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post):", "mock_download_to_stream.return_value = False mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path)", "= self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_called_once(self,", "try: self.resource_client.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected", "{\"resource_name\": \"a name\"} mock_post.return_value = {}, {} self.connection._apiVersion = 200", "file except in compliance with the License. 
# You may", "False, -1) except exceptions.HPOneViewUnknownType as e: self.assertEqual(\"Unknown object type\", e.args[0])", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI)", "\"get\") def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\":", "test_build_subresource_uri(self): options = [ dict( resource='1', subresource='2', path='sub', uri='/rest/testuri/1/sub/2'), dict(", "custom_headers={\"Accept-Language\": \"en_US\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "mock_put.return_value = None, self.response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result,", "self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value", "= \"/rest/testuri\" request_body = [{ \"op\": \"replace\", \"path\": \"/name\", \"value\":", "uri=uri, force=True) expected_uri = \"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put')", "= {\"nextPageUri\": None, \"members\": []} result = self.resource_client.get_all() self.assertEqual(result, [])", "self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data, get_by_return_value[0]) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_without_data_update(self, mock_get_by): mock_get_by.return_value 
=", "'wait_for_task') def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value", "mock_patch): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value", "@mock.patch.object(connection, 'post') def test_create_uri_with_force(self, mock_post): dict_to_create = {\"resource_name\": \"a name\",", "raised\") def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for this resource\"", "License. ### import io import unittest import mock from mock", "= StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_args(self,", "dict_to_create = {\"resource_name\": \"a name\", \"force\": \"yes\"} mock_post.return_value = {},", "\"get\") def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this case, the user", "def test_update_with_default_api_version_300(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\"", "custom_headers=headers) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers(self, mock_task,", "= { \"nextPageUri\": uri, \"members\": members, \"uri\": uri } result", "{\"name\": \"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY,", "None, {} try: self.resource_client.create_report(\"/rest/path/create-report\") except exceptions.HPOneViewException as exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0])", 
"connection('127.0.0.1', 300) self.resource_client = StubResourceFileHandler(self.connection) super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post_multipart_with_response_handling\") def", "mock_post_multipart.return_value = None, fake_response_body result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body)", "\"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except", "mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_return_entity(self,", "path='/sub/', uri='/rest/testuri/11/sub/12'), dict( resource='/rest/testuri/13', subresource=None, path='/sub/', uri='/rest/testuri/13/sub'), ] for option", "uri = \"/rest/testuri\" dict_to_update = {\"name\": \"test\", \"type\": \"typeV300\"} self.resource_client.data", "= entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") self.assertEqual(self.resource_client.data, entity) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_request_when_uri_is_provided(self, mock_patch):", "self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_subresource_uri(self):", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): 
fake_response_body = mock.Mock() uri", "self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_should_not_override_resource_properties(self,", "'new_name', }] mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch(", "@mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers(self, mock_task, mock_patch): dict_info =", "= '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(id, extracted_id)", "self.response_body self.resource_client.update(dict_to_update) self.assertEqual(self.response_body, self.resource_client.data) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "{\"Content-Type\": \"application/json\", \"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers)", "test resource schema methods\"\"\" class StubResource(Resource): \"\"\"Stub class to test", "\"'name'='OneViewSDK \\\"Test FC Network'\" sort = 'name:ascending' query = \"name", "= self.task self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post')", "result = self.resource_client.get_all( 1, 500, filter, query, sort, view, 'name,owner,modified',", "'wait_for_task') def test_delete_by_id_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value", "e.args[0]) else: self.fail() def test_update_with_empty_dict(self): try: 
self.resource_client.update({}) except ValueError as", "self.task self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v200(self,", "as e: self.assertEqual(\"Unknown object type\", e.args[0]) else: self.fail() @mock.patch.object(connection, 'get')", "def test_get_utilization_with_args(self, mock_get): self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z', refresh=True, view='day') expected_uri =", "mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_custom_headers(self,", "\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource):", "self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'get')", "\"uri\": uri} self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all()", "mock_wait4task.not_been_called() @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get,", "ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def 
test_update_with_empty_dict(self):", "default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_default_api_version_300(self, mock_put): dict_to_update", "'new_name', 60) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def", "\"2015-03-13T14: 10: 50.322Z\"} ] task_with_output = self.task.copy() task_with_output['taskOutput'] = task_output", "call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list", "self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value", "= self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, update_task) mock_update.assert_called_once_with(\"a_uri\", dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put')", "\"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce =", "uri} self.resource_client._merge_default_values() self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = 
'/rest/testuri?start=0&count=-1'", "mock_get.assert_called_once_with(self.URI + \"/schema\") @mock.patch.object(connection, 'get') def test_get_by_id_uri(self, mock_get): self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI", "= \"/rest/resource/test\" mock_put.return_value = None, self.response_body response = self.resource_client.update(dict_to_update, uri=uri)", "task_with_output self.resource_client.create_report(\"/rest/path/create-report\", timeout=60) mock_get_completed_task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def", "uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = False mock_open.return_value = io.StringIO() result", "'/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) def test_get_utilization_with_empty(self): try: self.resource_client.get_utilization('') except ValueError as exception:", "self.resource_client.create_report(\"/rest/path/create-report\", timeout=60) mock_get_completed_task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_output_list_when_results(self,", "@mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"expected\"}, {\"name\":", "'/name', 'new_name', -1) mock_wait4task.assert_called_once_with(self.task, mock.ANY) def test_delete_with_none(self): try: self.resource_client.delete(None) except", "Exception was not raised\") def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI", "\"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" 
uri", "@mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value =", "setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client)", "'&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\ '&query=name%20NE%20%27WrongName%27' \\ '&sort=name%3Aascending' \\ '&view=%22%7Bview-name%7D%22' \\ '&fields=name%2Cowner%2Cmodified' \\", "= None self.assertEqual(actual_result, expected_result) @mock.patch.object(connection, \"get\") def test_get_all_called_once(self, mock_get): filter", "{\"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( uri,", "@mock.patch.object(connection, 'post') def test_create_with_api_version_200(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "\"EXpected\"}, {\"name\": \"not expected\"}] response = self.resource_client.get_by('name', 'exPEcted') self.assertEqual(response, [{\"name\":", "\"patch\") def test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {} self.resource_client.patch(\"operation\",", "sort = 'name:ascending' query = \"name NE 'WrongName'\" view =", "mock_post): mock_post.return_value = None, self.response_body new_resource = self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource, self.resource_client)", "mock_get_by): self.resource_client.data = {\"name\": \"testname\"} mock_get_by.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound):", "= {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} resource_client =", "patch operations\"\"\" class StubResourceUtilization(ResourceUtilizationMixin, Resource): 
\"\"\"Stub class to test resource", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body =", "mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None,", "\"/rest/testuri\" dict_to_update = {\"name\": \"test\"} expected = {\"name\": \"test\", \"uri\":", "'5'}, {'id': '6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}]", "mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_with_result(self, mock_get_by): mock_get_by.return_value =", "= {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} expected_dict =", "['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'},", "\"test\"} expected = {\"name\": \"test\", \"uri\": uri, \"type\": \"typeV300\"} mock_put.return_value", "\"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\ \"&query=name%20NE%20%27WrongName%27\" \\ \"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\": \"member\"}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "KIND, either express or implied. 
# See the License for", "self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_return_resource_when_response_is_not_task(self,", "= self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch( '123a53cz', 'replace', '/name',", "{}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with(uri, request_body,", "{\"name\": \"test\", \"type\": self.TYPE_V300} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)", "name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection,", "path='sub', uri='/rest/testuri/7/sub/8'), dict( resource=None, subresource='/rest/testuri/9/sub/10', path='sub', uri='/rest/testuri/9/sub/10'), dict( resource='/rest/testuri/11', subresource='12',", "mock_get_all): mock_get_all.return_value = [{\"name\": \"EXpected\"}, {\"name\": \"not expected\"}] response =", "self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\", "\"test\", \"type\": self.TYPE_V300} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection,", "= connection('127.0.0.1', 300) self.resource_client = StubResourcePatch(self.connection) super(ResourcePatchMixinTest, 
self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\")", "else: self.fail() def test_update_with_empty_dict(self): try: self.resource_client.update({}) except ValueError as e:", "class FakeResource(object): def __init__(self, con): self._connection = con self._client =", "uri, \"type\": \"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected,", "test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value", "exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection,", "'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value = self.task,", "mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\")", "mock_wait4task): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.data =", "\"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"},", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_return_entity(self, mock_wait4task, mock_put,", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource):", 
"\"get_completed_task\") def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" dict_info", "def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\"", "mock_wait4task, mock_post): response_body = {\"resource_name\": \"name\"} mock_post.return_value = self.task, self.task", "def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get')", "mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result) def test_transform_list_to_dict(self):", "{}, {} self.resource_client.create(dict_to_create, timeout=-1) expected_uri = \"/rest/testuri\" mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None)", "def test_merge_default_values(self): default_type = {\"type\": \"type1\"} resource1 = {\"name\": \"resource1\"}", "exceptions.HPOneViewUnknownType as e: self.assertEqual('Unrecognized URI for this resource', e.args[0]) else:", "self.assertEqual(result, entity) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch):", "mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection,", "def test_update_with_none(self): try: self.resource_client.update(None) except ValueError as e: self.assertTrue(\"Resource\" in", "def 
test_get_with_uri_should_work(self, mock_get): mock_get.return_value = {} uri = self.URI +", "try: self.resource_client.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected", "was not raised\") def test_build_subresource_uri(self): options = [ dict( resource=\"1\",", "'8'}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]", "'/rest/test/another/resource/uri/09USE7335NW3') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception", "(the \"License\"); # you may not use this file except", "self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path']) self.assertEqual(uri, option['uri']) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client.build_subresource_uri(None, \"123456\",", "@mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce", "self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open'))", "{'id': '6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect", "'put') def test_update_with_uri_called_once(self, mock_put): dict_to_update = {\"name\": \"test\"} uri =", "@mock.patch.object(connection, 'put') def test_update_without_default_values(self, mock_put): dict_to_update = {\"name\": \"test\"} uri", "custom_headers=None) @mock.patch.object(connection, \"post\") 
@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value", "try: self.resource_client.create({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else:", "resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task')", "@mock.patch.object(connection, 'post') def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body result", "{'Extra': 'extra'} self.connection._apiVersion = 300 resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id',", "[{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls =", "@mock.patch.object(connection, 'post') def test_create_when_the_resource_is_a_list(self, mock_post): dict_to_create = [{\"resource_name\": \"a name\"}]", "mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"operation\",", "def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body new_resource = self.resource_client.create_with_zero_body()", "expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with_uri(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork',", "else: self.fail() @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update):", 
"\"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try: self.resource_client.get(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0]) else:", "= {'uri': uri} expected = {\"name\": \"test\", \"type\": \"typeV300\", \"uri\":", "mock_put.return_value = None, self.response_body expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300}", "[] response = self.resource_client.get_by_id(\"123\") self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_collection_uri(self,", "= ResourceHelper(self.URI, self.connection, None) @mock.patch.object(ResourceHelper, \"do_put\") @mock.patch.object(Resource, \"ensure_resource_data\") def test_ensure_resource_should_call_once(self,", "\"type\": \"resource\", \"port\": \"1\"} merged_resource = merge_resources(resource1, resource2) self.assertEqual(merged_resource, expected_resource)", "@mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task,", "[{\"id\": \"7\"}]}] mock_get.side_effect = results result = self.resource_client.get_all(count=15) expected_items =", "\"a name\"} mock_patch.return_value = {}, {} headers = {'Extra': 'extra'}", "@mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch): entity =", "'3'}] mock_get.return_value = { 'nextPageUri': uri, 'members': members, 'uri': uri", "\"get\") def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") class ResourceTest(BaseTest):", "\"get\") def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = 
[\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results", "{'nextPageUri': None, 'members': []} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection,", "mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_custom_headers(self, mock_put,", "= [] response = self.resource_client.get_by_id(\"123\") self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def", "\"/12345?filter=name%3Dname\") @mock.patch.object(connection, 'get') def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345',", "self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_with_result(self, mock_get_by): mock_get_by.return_value", "super(ResourceClientTest, self).setUp() self.host = '127.0.0.1' self.connection = connection(self.host, 300) self.resource_client", "'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri", "\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_patch_return_entity(self,", "as e: self.assertTrue(\"id\" in e.args[0]) else: self.fail() def test_get_collection_with_none(self): try:", "# # Unless required by applicable law or agreed to", "@mock.patch.object(connection, 'put') def test_update_with_api_version_200(self, mock_put): dict_to_update = {\"name\": \"test\"} uri", "ResourceClient(self.connection, 
self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) @mock.patch.object(connection, 'post_multipart_with_response_handling')", "ValueError as e: self.assertTrue(\"field\" in e.args[0]) else: self.fail() @mock.patch.object(connection, 'get')", "force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_should_wait_for_task(self, mock_wait4task,", "def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value =", "def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get): self.resource_client.data = {\"uri\": \"/uri/test\"} mock_do_get.return_value = []", "self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\",", "def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"}", "expected_calls) @mock.patch.object(connection, 'get') def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3',", "{ \"nextPageUri\": uri, \"members\": members, \"uri\": uri } result =", "= {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI + \"/12345\")", "mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body)", "\"get\") def test_get_utilization_with_multiple_filters(self, mock_get, 
mock_ensure_resource): self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"], refresh=True,", "None, 'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=3)", "'/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = False mock_open.return_value = io.StringIO() result = self.resource_client.download(uri,", "{\"uri\": \"/rest/test\"} self.resource_client.update(data={\"name\": \"test\"}) mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once() def test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data =", "{} expected = {'name': 'resource1'} resource_client = ResourceClient(self.connection, self.URI) result", "@mock.patch.object(connection, \"get\") def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\"", "class StubResourceZeroBody(ResourceZeroBodyMixin, Resource): \"\"\"Stub class to test resoruce zero body", "[] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by): self.resource_client.data", "mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"} mock_put.return_value = None, self.response_body", "raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client._helper.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID,", "<reponame>gzecchi/oneview-python<filename>tests/unit/resources/test_resource.py # -*- coding: utf-8 -*- ### # (C) Copyright", "mock_get_by.return_value = [] response = 
self.resource_client.get_by_name('Resource Name,') self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", 'Resource", "'/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_uri_with_defaults(self, mock_get): self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri =", "mock_wait4task.return_value = self.task self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, self.resource_client.data) mock_update.assert_called_once_with(uri, expected, custom_headers=None)", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri", "view = '\"{view-name}\"' scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value = {\"members\": [{\"member\":", "timeout=-1) mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "input = '/rest/testuri/09USE7335NW3' expected_output = '/rest/testuri/09USE7335NW3' result = self.resource_client.build_uri(input) self.assertEqual(expected_output,", "\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, self.response_body) class ResourcePatchMixinTest(BaseTest): def setUp(self): self.connection =", "\"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value =", "def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource): request_body = [{ \"op\": \"replace\", \"path\":", "\"Unrecognized URI for this resource\" uri = 
\"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" fake_resource =", "\"type\": self.TYPE_V200} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put')", "{} mock_wait4task.return_value = self.task self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(connection,", "else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_id_should_work(self): input =", "= self.task.copy() task_with_output['taskOutput'] = task_output mock_post.return_value = self.task, {} mock_get_completed_task.return_value", "entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") self.assertEqual(self.resource_client.data, entity) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\")", "fake_associated_resurce result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor,", "mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task,", "request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v200(self, mock_patch,", "mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body()", "self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(force=True)", "dict( resource=\"1\", subresource=\"2\", path=\"sub\", 
uri=\"/rest/testuri/1/sub/2\"), dict( resource=\"/rest/testuri/3\", subresource=\"4\", path=\"sub\", uri=\"/rest/testuri/3/sub/4\"),", "class ResourcePatchMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client =", "implied. # See the License for the specific language governing", "= entity result = self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1)", "self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceFileHandler(self.connection) super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection,", "not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3') except exceptions.HPOneViewUnknownType as", "[{\"name\": \"EXpected\"}]) mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all): mock_get_all.return_value", "request_body = [{ \"op\": \"replace\", \"path\": \"/name\", \"value\": \"new_name\", }]", "mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri',", "= response_body new_resource = self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(connection, \"post\") def", "filter = \"'name'='OneViewSDK \\\"Test FC Network'\" sort = \"name:ascending\" query", "self.connection._apiVersion = 200 self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with(uri, request_body, custom_headers={}) @mock.patch.object(Resource,", "@mock.patch.object(Resource, \"ensure_resource_data\") 
@mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch,", "resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_custom_headers(self, mock_post):", "@mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri =", "\"a_uri\", } mock_put.return_value = self.task, {} mock_wait4task.return_value = dict_to_update result", "mock_get.return_value = {} self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\", \"name=three\"]) mock_get.assert_called_once_with(self.URI + \"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection,", "\"uri\": \"\"} try: self.resource_client.delete(dict_to_delete, False, -1) except exceptions.HPOneViewUnknownType as e:", "2) @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri')", "test_get_collection_with_none(self): try: self.resource_client.get_collection(None) except ValueError as e: self.assertTrue(\"id\" in e.args[0])", "self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value = None, {} try: self.resource_client.create_report(\"/rest/path/create-report\")", "self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1) mock_wait4task.assert_called_once_with(self.task, mock.ANY) def test_delete_with_none(self):", "mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def 
test_create_should_not_override_resource_properties(self, mock_post): dict_to_create =", "test_update_with_custom_headers(self, mock_put): dict_to_update = {\"name\": \"test\"} mock_put.return_value = None, self.response_body", "} mock_put.return_value = self.task, {} mock_wait4task.return_value = dict_to_update result =", "result_list = merge_default_values([resource1, resource2], default_type) expected_list = [ {\"name\": \"resource1\",", "test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client.build_uri('/rest/') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else:", "ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor,", "self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(connection, \"post\") def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value =", "default_values) self.assertEqual(result, expected) def test_should_not_merge_when_default_values_not_defined(self): resource = {'name': 'resource1'} default_values", "self.resource_client.get_all() expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id':", "RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method)", "= \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_delete.assert_called_once_with(uri) @mock.patch.object(connection, 'delete') def test_delete_all_should_return_true(self,", "dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_api_version_200(self, 
mock_post): dict_to_create = {\"resource_name\":", "\"post\") def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\": \"a name\", \"type\":", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): response_body = {\"resource_name\":", "exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def", "= {\"uri\": \"/rest/testuri\"} result = self.resource_client.delete() self.assertTrue(result) @mock.patch.object(connection, 'delete') def", "mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock() uri = \"/rest/testuri/\" filepath", "[{ \"op\": \"replace\", \"path\": \"/name\", \"value\": \"new_name\", }] mock_patch.return_value =", "= response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection,", "def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client.build_uri('/rest/') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0])", "{'nextPageUri': None, 'members': [{'id': '7'}]}] mock_get.side_effect = results result =", "option[\"path\"]) self.assertEqual(uri, option[\"uri\"]) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client._helper.build_subresource_uri(None, \"123456\", \"sub-path\") except", "@mock.patch.object(connection, 'post') def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "message = \"Unrecognized URI for this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\"", "= \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, 
fake_response_body result = self.resource_client.upload(filepath, uri)", "fake_response_body result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\"))", "\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource): request_body = [{", "self.DEFAULT_VALUES self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client._merge_default_values() self.task = {\"task\": \"task\",", "\"en_US\"}) @mock.patch.object(connection, \"post\") def test_create_should_return_new_resource_instance(self, mock_post): mock_post.return_value = {}, {}", "\"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_custom_headers(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value =", "e.args[0]) else: self.fail() @mock.patch.object(connection, 'delete') def test_delete_with_dict_uri(self, mock_delete): resource =", "self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value =", "[] response = self.resource_client.get_by_name(\"Resource Name,\") self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(connection,", "= [\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\":", "[{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}, {\"id\": \"4\"}, {\"id\": \"5\"},", "name\", \"uri\": \"a_uri\", } mock_put.return_value = self.task, {} mock_wait4task.return_value =", "def test_create_with_none(self): try: self.resource_client.create(None) except ValueError as e: 
self.assertTrue(\"Resource\" in", "'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task,", "= { \"200\": {\"type\": TYPE_V200}, \"300\": {\"type\": TYPE_V300} } def", "self.task, {} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\") mock_post.assert_called_once_with(\"/rest/path/create-report\", {}) @mock.patch.object(connection, 'post')", "RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method) class StubResourceFileHandler(ResourceFileHandlerMixin, Resource): \"\"\"Stub", "a maximum number of results to be returned but for", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_without_default_values(self, mock_put, mock_ensure_resource): uri =", "options: uri = self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"], option[\"path\"]) self.assertEqual(uri, option[\"uri\"]) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self):", "def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value =", "'/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'},", "\"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, fake_response_body result =", "self.task mock_wait4task.return_value = response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result,", "path='sub', uri='/rest/testuri/1/sub/2'), dict( resource='/rest/testuri/3', subresource='4', path='sub', uri='/rest/testuri/3/sub/4'), dict( resource='5', 
subresource='/rest/testuri/5/sub/6',", "except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def", "\"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( uri, dict_info,", "def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value = self.task, {} mock_wait4task.return_value =", "def test_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body filter = \"name='Exchange", "[{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}] mock_get.return_value = { \"nextPageUri\":", "result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_false_when_error(self,", "was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\") except exceptions.HPOneViewUnknownType", "this case, the user provides a maximum number of results", "200 self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with(uri, request_body, custom_headers={}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "response_body = {\"resource_name\": \"name\"} mock_put.return_value = self.task, self.task mock_wait4task.return_value =", "self.task, self.task mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1)", "= {}, {} self.connection._apiVersion = 200 self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value',", "def test_patch_return_entity(self, mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"} 
mock_patch.return_value =", "= self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input = '/rest/testuri/09USE7335NW3' expected_output", "\"path\": \"/name\", \"value\": \"new_name\", }] mock_patch.return_value = {}, {} self.resource_client.patch(\"replace\",", "uri = \"/rest/testuri\" expected = {\"name\": \"test\", \"uri\": uri, \"type\":", "'replace', 'path': '/name', 'value': 'new_name', }] mock_patch.return_value = {}, {}", "mock_ensure_resource): dict_to_update = {\"name\": \"test\"} uri = \"/rest/testuri\" mock_put.return_value =", "def test_create_return_entity(self, mock_wait4task, mock_post): dict_to_create = { \"resource_name\": \"a name\",", "results result = self.resource_client.get_all(count=15) expected_items = [{\"id\": \"1\"}, {\"id\": \"2\"},", "\"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource): uri =", "self.assertTrue(result) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete): mock_delete.return_value", "raised\") def test_build_subresource_uri(self): options = [ dict( resource=\"1\", subresource=\"2\", path=\"sub\",", "{\"resource_name\": \"a name\"} mock_post.return_value = {}, {} expected_dict = {\"resource_name\":", "mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task", "= \"/rest/testuri\" mock_put.return_value = None, self.response_body expected = {\"name\": \"test\",", "@mock.patch.object(Resource, \"ensure_resource_data\") def test_ensure_resource_should_call_once(self, mock_do_put, mock_ensure_resource): self.resource_client.data = {\"uri\": \"/rest/test\"}", "'get') def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list = 
['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results", "\"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\": \"2\"},", "fake_associated_resurce result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor,", "= {\"name\": \"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY,", "mock_get.assert_called_once_with(expected_uri) class ResourceSchemaMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client", "Unless required by applicable law or agreed to in writing,", "\"expected\"}, {\"name\": \"not expected\"}] response = self.resource_client.get_by('connection.name', 'expected') self.assertEqual(response, [{'name':", "= self.task self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with_uri(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub')", "= 200 self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language':", "self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'get') def test_get_utilization_with_args(self, mock_get):", "= StubResource(self.connection) super(ResourceTest, 
self).setUp(self.resource_client) self.resource_helper = ResourceHelper(self.URI, self.connection, None) @mock.patch.object(ResourceHelper,", "\\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource,", "[{\"member\": \"member\"}]} result = self.resource_helper.get_all( 1, 500, filter, query, sort)", "{\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body response", "= {\"resource_name\": \"name\"} mock_post.return_value = self.task, self.task mock_wait4task.return_value = response_body", "self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'post') def test_create_uri(self, mock_post):", "None, self.response_body expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300, \"uri\": uri}", "= None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "def test_patch_request_custom_headers(self, mock_task, mock_patch): dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value", "mock_post): dict_to_create = [{\"resource_name\": \"a name\"}] mock_post.return_value = {}, {}", "self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_by_id_without_result(self, mock_get): mock_get.return_value = []", "the specific language governing permissions and # limitations under the", "dict( resource=None, subresource=\"/rest/testuri/9/sub/10\", path=\"sub\", uri=\"/rest/testuri/9/sub/10\"), dict( resource=\"/rest/testuri/11\", subresource=\"12\", path=\"/sub/\", 
uri=\"/rest/testuri/11/sub/12\"),", "self.resource_client.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception", "= 200 self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz',", "\"get\") def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results", "test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\":", "self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri(self, mock_get):", "= io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result) def test_transform_list_to_dict(self): list", "['one', 'two', {'tree': 3}, 'four', 5] dict_transformed = transform_list_to_dict(list=list) self.assertEqual(dict_transformed,", "name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection,", "] for option in options: uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path'])", "in e.args[0]) else: self.fail() def test_get_with_none(self): try: self.resource_client.get(None) except ValueError", "self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_update_with_none(self): try: self.resource_client.update(None) except", "mock_wait4task, mock_post_multipart): 
fake_associated_resurce = mock.Mock() uri = '/rest/testuri/' filepath =", "from hpOneView import exceptions from hpOneView.resources.resource import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin,", "force=True, timeout=-1) self.assertEqual(self.task, delete_task) mock_delete.assert_called_once_with(self.URI + \"/1?force=True\", custom_headers=None) @mock.patch.object(connection, 'delete')", "def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"expected\"}, {\"name\": \"not expected\"}]", "import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID,", "test_delete_with_none(self): try: self.resource_client.delete(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0])", "'get_completed_task') def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post): task_output = [ {\"type\": \"FCIssueResponseV2\",", "= '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath,", "filter='startDate=2016-05-30T03:29:42.361Z', refresh=True, view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower'", "None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection,", "mock_put, mock_ensure_resource): response_body = {\"resource_name\": \"name\"} self.resource_client.URI = \"/rest/enclosures\" mock_put.return_value", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def 
test_build_uri_with_incomplete_uri_should_raise_exception(self): try:", "Exception was not raised\") def test_build_uri_with_id_should_work(self): input = '09USE7335NW35' expected_output", "{\"uri\": \"/rest/testuri\"} self.resource_client._merge_default_values() self.task = {\"task\": \"task\", \"taskState\": \"Finished\"} self.response_body", "@mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\"", "{ \"200\": {\"type\": TYPE_V200}, \"300\": {\"type\": TYPE_V300} } def setUp(self,", "response_body new_resource = self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(connection, \"post\") def test_create_with_zero_body_without_task(self,", "= \"/rest/testuri\" dict_to_update = {\"name\": \"test\", \"type\": \"typeV300\"} self.resource_client.data =", "'nextPageUri': uri, 'members': members, 'uri': uri } result = self.resource_client.get_all()", "'members': []} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'get') def", "\"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post')", "except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() @mock.patch.object(connection,", "expected_dict, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource):", "{} self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with( \"/rest/testuri\", request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) 
@mock.patch.object(Resource,", "result = self.resource_client.get_all() expected_items = [{'id': '1'}, {'id': '2'}, {'id':", "self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource,", "self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri_and_query_string(self, mock_get):", "\"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_delete.assert_called_once_with(uri) @mock.patch.object(connection, 'delete') def test_delete_all_should_return_true(self, mock_delete):", "= updated_data self.resource_client.refresh() self.assertEqual(self.resource_client.data, updated_data) @mock.patch.object(connection, \"post\") def test_create_uri(self, mock_post):", "@mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): uri", "self.task, self.response_body mock_wait4task.return_value = self.task filter = \"name='Exchange Server'\" delete_task", "[ {\"name\": \"resource1\", \"type\": \"type1\"}, {\"name\": \"resource2\", \"type\": \"type1\"} ]", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value =", "dict_to_update) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value", "[\"/rest/testuri?start=0&count=-1\", 
\"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"},", "mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task delete_task = self.resource_client.delete('1',", "'') def test_extract_id_from_uri_passing_id(self): uri = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id,", "\"en_US\"}) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body", "self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(connection, \"post\") def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None,", "= \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection,", "test_transform_list_to_dict(self): list = ['one', 'two', {'tree': 3}, 'four', 5] dict_transformed", "[]} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_no_members(self,", "= None, self.response_body self.resource_client.data = {\"uri\": \"/rest/testuri\"} result = self.resource_client.delete()", "result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client._helper.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID,", "@mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce", "__init__(self, con): self._connection = con self._client = ResourceClient(con, 
\"/rest/fake/resource\") def", "uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream):", "(C) Copyright [2019] Hewlett Packard Enterprise Development LP # #", "self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def", "'200': {\"type\": \"EnclosureGroupV200\"}, '300': {\"type\": \"EnclosureGroupV300\"} } expected = {'name':", "= [ dict( resource=\"1\", subresource=\"2\", path=\"sub\", uri=\"/rest/testuri/1/sub/2\"), dict( resource=\"/rest/testuri/3\", subresource=\"4\",", "\"\"\" uri_list = [\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1],", "Name,\") mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(Resource, \"get_by\") def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value", "= \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\"", "self.assertEqual([{\"member\": \"member\"}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all()", "mock_get): try: self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else:", "self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY) @mock.patch.object(connection, 
'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get')", "'get') def test_get_with_uri_should_work(self, mock_get): mock_get.return_value = {} uri = self.URI", "mock_get_by.return_value = [] response = self.resource_client.get_by_name(\"Resource Name,\") self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", \"Resource", "else: self.fail(\"Expected Exception was not raised\") def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message =", "results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection,", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try:", "case, the user provides a maximum number of results to", "self.resource_client.update(dict_to_update, uri=uri, force=True) expected_uri = \"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None) @mock.patch.object(connection,", "} mock_post.return_value = self.task, {} mock_wait4task.return_value = created_resource result =", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update,", "user provides a maximum number of results to be returned", "= self.resource_client.delete() self.assertTrue(result) @mock.patch.object(connection, 'delete') def test_helper_delete_all_should_return_true(self, mock_delete): mock_delete.return_value =", "\"type1\"}, {\"name\": \"resource2\", \"type\": \"type1\"} ] self.assertEqual(result_list, expected_list) def test_raise_unavailable_method_exception(self):", 
"@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body() mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None)", "\"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) class ResourceSchemaMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300)", "delete_task = self.resource_client.delete('1', force=True, timeout=-1) self.assertEqual(self.task, delete_task) mock_delete.assert_called_once_with(self.URI + \"/1?force=True\",", "\"get\") def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK \\\"Test FC Network'\"", "test_create_return_entity(self, mock_wait4task, mock_post): dict_to_create = { \"resource_name\": \"a name\", }", "self.response_body mock_wait4task.return_value = self.task delete_task = self.resource_client.delete('1', force=True, timeout=-1) self.assertEqual(self.task,", "{}, {} delete_result = self.resource_client.delete(resource) self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\", custom_headers=None) def test_delete_with_empty_dict(self):", "'/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\", "{\"name\": \"test\", \"type\": \"anotherType\"} uri = \"/rest/resource/test\" mock_put.return_value = None,", "default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_without_default_values(self, mock_put): dict_to_update", "mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" 
mock_post_multipart.return_value = self.task,", "test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{\"nextPageUri\":", "the user provides a maximum number of results to be", "def test_get_with_uri_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for this resource\" uri", "view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true'", "self.assertEqual(result, response_body) @mock.patch.object(connection, \"put\") def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None,", "mock_get): filter = \"'name'='OneViewSDK \\\"Test FC Network'\" sort = \"name:ascending\"", "{\"key\": \"value\"}]} collection = self.resource_client.get_collection('12345') self.assertEqual(len(collection), 2) @mock.patch.object(ResourceClient, 'get_all') def", "'op': 'replace', 'path': '/name', 'value': 'new_name', }] mock_patch.return_value = {},", "self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") def test_create_should_return_new_resource_instance(self,", "custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v300(self, mock_patch):", "resource=\"/rest/testuri/3\", subresource=\"4\", path=\"sub\", uri=\"/rest/testuri/3/sub/4\"), dict( resource=\"5\", subresource=\"/rest/testuri/5/sub/6\", path=\"sub\", uri=\"/rest/testuri/5/sub/6\"), dict(", "not raised\") def 
test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client._helper.build_uri('') except ValueError as exception:", "@mock.patch.object(connection, 'get') def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3']", "option[\"subresource\"], option[\"path\"]) self.assertEqual(uri, option[\"uri\"]) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client._helper.build_subresource_uri(None, \"123456\", \"sub-path\")", "\"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri", "'/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '') def test_extract_id_from_uri_passing_id(self): uri =", "not raised\") @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post):", "self.task, mock.Mock() mock_wait4task.return_value = fake_associated_resurce result = self.resource_client.upload(filepath, uri) self.assertEqual(result,", "self.assertFalse(result) class ResourceZeroBodyMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client", "self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourcePatch(self.connection) super(ResourcePatchMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource,", "{'id': '3'}] mock_get.return_value = { 'nextPageUri': uri, 'members': members, 'uri':", "language governing permissions and # limitations under the License. 
###", "con): self._connection = con self._client = ResourceClient(con, \"/rest/fake/resource\") def get_fake(self,", "test_delete_should_return_true(self, mock_delete, mock_ensure_resource): mock_delete.return_value = None, self.response_body self.resource_client.data = {\"uri\":", "self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'post') def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\":", "@mock.patch.object(connection, \"get\") def test_get_utilization_with_args(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True, view=\"day\")", "test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = '/rest/testuri?start=0&count=-1' members = [{'id': '1'}, {'id':", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", {},", "expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\", "dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"} uri = \"/rest/resource/test\" mock_put.return_value", "uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock()", "self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_should_not_override_resource_properties(self,", "mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"name\": \"test\", \"type\": \"typeV300\"}", 
"'get_completed_task') def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch): dict_info = {\"resource_name\": \"a name\"}", "resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers)", "def test_get_collection_with_none(self): try: self.resource_client.get_collection(None) except ValueError as e: self.assertTrue(\"id\" in", "custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post):", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {'nextPageUri': None,", "= self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'put') def test_update_with_zero_body_without_task(self,", "dict_info, custom_headers={\"Extra\": \"extra\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor,", "test_update_with_custom_headers(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"} mock_put.return_value = None,", "None, self.response_body self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "= self.task self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, self.resource_client.data) mock_update.assert_called_once_with(uri, expected, 
custom_headers=None) @mock.patch.object(Resource,", "def test_patch_with_custom_headers_v300(self, mock_patch): mock_patch.return_value = {}, {} resource_client = ResourceClient(self.connection,", "resource2], default_type) expected_list = [ {\"name\": \"resource1\", \"type\": \"type1\"}, {\"name\":", "@mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_with_result(self, mock_get_by): mock_get_by.return_value = [{\"name\": \"value\"}] response", "test_update_with_none(self): try: self.resource_client.update(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0])", "self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three']) mock_get.assert_called_once_with(self.URI + \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, 'get') def", "'/rest/testuri/123a53cz', request_body, custom_headers={}) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v300(self, mock_patch): request_body =", "test_update_with_zero_body_called_once(self, mock_wait4task, mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task", "{} expected = {\"resource_name\": \"a name\", \"type\": \"anotherType\"} self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI,", "[] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper, \"do_get\") @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_should_update_resource_data(self,", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, \"post_multipart_with_response_handling\")", "dict( resource=\"/rest/testuri/11\", subresource=\"12\", path=\"/sub/\", uri=\"/rest/testuri/11/sub/12\"), dict( 
resource=\"/rest/testuri/13\", subresource=None, path=\"/sub/\", uri=\"/rest/testuri/13/sub\"),", "object type\", e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def test_get_schema_uri(self, mock_get):", "Packard Enterprise Development LP # # Licensed under the Apache", "result) def test_build_uri_with_uri_should_work(self): input = \"/rest/testuri/09USE7335NW3\" expected_output = \"/rest/testuri/09USE7335NW3\" result", "mock.ANY) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path =", "= ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members': [{'id':", "@mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {}", "mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_without_default_values(self, mock_post): dict_to_create =", "= {}, {} self.resource_client.patch( '/rest/testuri/123a53cz', 'replace', '/name', 'new_name', 60) mock_patch.assert_called_once_with(", "def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock()", "'WrongName'\" view = '\"{view-name}\"' scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value = {\"members\":", "fake_associated_resurce) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body", "mock.Mock() uri = '/rest/testuri/' filepath = 
\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None,", "= {}, {} new_instance = self.resource_client.create({}) self.assertNotEqual(self.resource_client, new_instance) @mock.patch.object(connection, \"post\")", "{'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'},", "expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_default_api_version_300(self, mock_put): dict_to_update = {\"name\":", "mock.Mock() uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task,", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\":", "unavailable_method) class StubResourceFileHandler(ResourceFileHandlerMixin, Resource): \"\"\"Stub class to test resource file", "mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI) @mock.patch.object(connection,", "body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor,", "def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceSchema(self.connection) super(ResourceSchemaMixinTest,", "Resource): \"\"\"Stub class to test resource patch operations\"\"\" class StubResourceUtilization(ResourceUtilizationMixin,", "@mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this case, the", "def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") 
@mock.patch.object(connection, 'get') def", "= None, self.response_body self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection,", "test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value", "def test_get_utilization_with_multiple_filters(self, mock_get): self.resource_client.get_utilization( '09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'], refresh=True, view='day')", "test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file", "= {}, {} self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with( \"/rest/testuri\", request_body, custom_headers={\"Content-Type\":", "mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") class ResourceTest(BaseTest): def setUp(self): self.connection", "expected_output = \"/rest/testuri/09USE7335NW3\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self):", "= results result = self.resource_client.get_all(count=15) expected_items = [{'id': '1'}, {'id':", "= io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file,", "\"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = 
\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None,", "= None, self.response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body)", "test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) class ResourceSchemaMixinTest(BaseTest):", "uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.connection._apiVersion = 200", "[]) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {\"nextPageUri\": None,", "test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch): dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value =", "from tests.test_utils import mock_builtin from hpOneView.connection import connection from hpOneView", "\"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath, uri)", "{\"id\": \"5\"}, {\"id\": \"6\"}]}, {'nextPageUri': None, \"members\": [{\"id\": \"7\"}, {\"id\":", "= {\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict,", "@mock.patch.object(connection, \"get\") def test_get_by_id_without_result(self, mock_get): mock_get.return_value = [] response =", "def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]}", "'get') def test_get_all_with_custom_uri(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" 
mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "'get_all') def test_get_by_property_with__invalid_uri(self, mock_get_all): try: self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/other/5435534/sub') except exceptions.HPOneViewUnknownType", "{}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post):", "input = '09USE7335NW35' expected_output = '/rest/testuri/09USE7335NW35' result = self.resource_client.build_uri(input) self.assertEqual(expected_output,", "None, 'members': [{'id': '7'}]}] mock_get.side_effect = results result = self.resource_client.get_all(count=15)", "= {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} collection = self.resource_client.get_collection('12345') self.assertEqual(len(collection),", "self.response_body self.resource_client.update(dict_to_update, uri=uri, force=True) expected_uri = \"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None)", "result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def", "{'tree': 3}, 'four', 5] dict_transformed = transform_list_to_dict(list=list) self.assertEqual(dict_transformed, {'5': True,", "filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called()", "UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method) class", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value = 
self.task, self.task", "mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri_and_query_string(self,", "60) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v200(self, mock_patch): request_body = [{ 'op':", "\"300\": {\"type\": TYPE_V300} } def setUp(self, resource_client=None): self.resource_client = resource_client", "= {\"body\": \"body\"} self.custom_headers = {\"Accept-Language\": \"en_US\"} class ResourceFileHandlerMixinTest(BaseTest): def", "'post') def test_create_uri_with_force(self, mock_post): dict_to_create = {\"resource_name\": \"a name\", \"force\":", "task_with_output = self.task.copy() task_with_output['taskOutput'] = task_output mock_post.return_value = self.task, {}", "@mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri =", "resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart(self, mock_post_multipart): uri", "uri) mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath", "\"resource_name\": \"a name\", \"uri\": \"a_uri\", } mock_put.return_value = self.task, {}", "self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, 'post_multipart_with_response_handling') 
@mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_return_resource_when_response_is_not_task(self,", "'expected') self.assertEqual(response, [{'name': 'expected'}, {'name': 'not expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient,", "uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, fake_response_body", "= \"/rest/testuri\" mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_api_version_200(self, mock_post):", "'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value = self.task,", "mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get):", "def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI + \"?filter=name%3Dname\")", "test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") class ResourceTest(BaseTest): def setUp(self):", "filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY,", "= None, self.response_body self.resource_client.URI = \"/rest/enclosures\" result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\",", "@mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, 
\"wait_for_task\") def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource): entity", "name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\")", "\"name='Exchange Server'\" result = self.resource_helper.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(Resource, \"ensure_resource_data\")", "\"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\")", "resource=\"5\", subresource=\"/rest/testuri/5/sub/6\", path=\"sub\", uri=\"/rest/testuri/5/sub/6\"), dict( resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\", path=\"sub\", uri=\"/rest/testuri/7/sub/8\"), dict(", "custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") def test_create_should_return_new_resource_instance(self, mock_post): mock_post.return_value = {},", "@mock.patch.object(connection, 'get') def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") @mock.patch.object(connection,", "\"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\":", "def test_get_by_with_incorrect_result_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"EXpected\"}, {\"name\": \"not expected\"}]", "= self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value = self.task, {} mock_get_completed_task.return_value", "uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" 
fake_resource = FakeResource(None) try: fake_resource.get_fake(uri) except exceptions.HPOneViewUnknownType", "self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( '/rest/testuri/id', dict_info, custom_headers={'Extra': 'extra', 'Content-Type':", "pagination purposes, a nextPageUri is returned by OneView. \"\"\" uri_list", "self.assertEqual(self.task, delete_task) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_by_id_called_once(self, mock_wait4task, mock_delete):", "\"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_force(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value =", "mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "You may obtain a copy of the License at #", "\"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}]}] mock_get.side_effect =", "self.assertEqual(self.response_body, response) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_custom_headers(self, mock_put):", "'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value = self.task,", "This example is not supported yet uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/otherthing' extracted_id", "def test_get_collection_with_path(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI + \"/test\")", "else: self.fail() def test_create_with_none(self): try: self.resource_client.create(None) except ValueError as e:", "response = 
self.resource_client.get_by('name', 'exPEcted') self.assertEqual(response, [{\"name\": \"EXpected\"}]) mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient,", "with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by): self.resource_client.data =", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( \"/rest/testuri\", {},", "test_patch_request_when_uri_is_provided(self, mock_patch): request_body = [{ 'op': 'replace', 'path': '/name', 'value':", "'path': '/name', 'value': 'new_name', }] mock_patch.return_value = {}, {} self.connection._apiVersion", "mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri = \"/rest/testuri/\" filepath =", "timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_custom_headers(self, mock_post): dict_to_create", "@mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri", "{\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results", "[2019] Hewlett Packard Enterprise Development LP # # Licensed under", "\"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\":", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_patch(self, mock_wait4task, 
mock_patch,", "custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def", "self.resource_client.refresh() self.assertEqual(self.resource_client.data, updated_data) @mock.patch.object(connection, \"post\") def test_create_uri(self, mock_post): dict_to_create =", "else: self.fail('Expected Exception was not raised') @mock.patch.object(connection, 'get') def test_get_all_should_do_multi_requests_when_response_paginated(self,", "{} uri = self.URI + \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri) mock_get.assert_called_once_with(uri) def test_get_with_uri_with_incompatible_url_shoud_fail(self):", "def test_get_utilization_by_id_with_defaults(self, mock_get): self.resource_client.get_utilization('09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get')", "= \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, fake_response_body result", "\"en_US\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_no_members(self, mock_get):", "@mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\":", "dict_info, custom_headers={'Extra': 'extra', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "'7'}] 
self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In", "\"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def", "exceptions.HPOneViewUnknownType as e: self.assertEqual(\"Unknown object type\", e.args[0]) else: self.fail() @mock.patch.object(connection,", "\\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member': 'member'}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_defaults(self,", "'7'}]}] mock_get.side_effect = results result = self.resource_client.get_all() expected_items = [{'id':", "self.task self.resource_client.delete('1', custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'}) def test_delete_dict_invalid_uri(self): dict_to_delete =", "super(ResourceUtilizationMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_args(self, mock_get, mock_ensure_resource):", "} def setUp(self, resource_client=None): self.resource_client = resource_client self.resource_client.URI = self.URI", "mock_get.side_effect = results result = self.resource_client.get_all() expected_items = [{'id': '1'},", "self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_without_default_values(self,", "[]) @mock.patch.object(connection, 'post') def test_create_report_should_raise_exception_when_not_task(self, mock_post): task_with_output = 
self.task.copy() task_with_output['taskOutput']", "test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): response_body = {\"resource_name\": \"name\"} self.resource_client.URI =", "\"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource): uri", "= {} self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI + \"?filter=name%3Dname\") @mock.patch.object(connection, \"get\") def test_get_collection_with_path(self,", "uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '') def test_extract_id_from_uri_passing_id(self):", "None} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task')", "def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body result = self.resource_client.create_with_zero_body(", "\"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource):", "uri='/rest/testuri/9/sub/10'), dict( resource='/rest/testuri/11', subresource='12', path='/sub/', uri='/rest/testuri/11/sub/12'), dict( resource='/rest/testuri/13', subresource=None, path='/sub/',", "\"created\": \"2015-03-13T14: 10: 50.322Z\"} ] task_with_output = self.task.copy() task_with_output['taskOutput'] =", "'/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY) @mock.patch.object(connection,", "= \"/rest/testuri\" 
dict_to_update = {\"resource_name\": \"a name\", \"uri\": uri} mock_put.return_value", "mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection,", "view, 'name,owner,modified', scope_uris=scope_uris) uri = '{resource_uri}?start=1' \\ '&count=500' \\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27'", "\"7\"}]}] mock_get.side_effect = results result = self.resource_client.get_all(count=15) expected_items = [{\"id\":", "@mock.patch.object(connection, 'get') def test_get_with_uri_should_work(self, mock_get): mock_get.return_value = {} uri =", "custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource):", "\"post\") def test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):", "= [ {\"name\": \"resource1\", \"type\": \"type1\"}, {\"name\": \"resource2\", \"type\": \"type1\"}", "\"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)", "= self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value", "= self.task update_task = 
self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, update_task) mock_update.assert_called_once_with(\"a_uri\", dict_to_update,", "e.args[0]) else: self.fail() def test_create_with_empty_dict(self): try: self.resource_client.create({}) except ValueError as", "def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results =", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Network'\" sort = \"name:ascending\" query = \"name NE 'WrongName'\" mock_get.return_value", "'get') def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "= self.task, self.response_body mock_wait4task.return_value = self.task filter = \"name='Exchange Server'\"", "option['path']) self.assertEqual(uri, option['uri']) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client.build_subresource_uri(None, \"123456\", 'sub-path') except", "list = ['one', 'two', {'tree': 3}, 'four', 5] dict_transformed =", "test_update_with_uri_called_once(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"name\": \"test\",", "mock_get): self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection, \"get\") def test_get_by_id_with_result(self, mock_get): self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\")", "call from tests.test_utils import mock_builtin from hpOneView.connection import connection from", "mock_get.assert_called_once_with(self.URI + \"/schema\") class 
ResourceTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1',", "'wait_for_task') def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value", "subresource='/rest/testuri/9/sub/10', path='sub', uri='/rest/testuri/9/sub/10'), dict( resource='/rest/testuri/11', subresource='12', path='/sub/', uri='/rest/testuri/11/sub/12'), dict( resource='/rest/testuri/13',", "@mock.patch.object(connection, \"get\") def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\"", "mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) expected_uri = \"/rest/testuri\" mock_put.assert_called_once_with(expected_uri, expected,", "\"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body = {\"resource_name\":", "\"/schema\") class ResourceTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client", "self.resource_client._helper.build_subresource_uri(None, \"123456\", \"sub-path\") except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else:", "{'id': '3'}]}, {'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id':", "} created_resource = { \"resource_id\": \"123\", \"resource_name\": \"a name\", }", "self.fail() def test_update_with_none(self): try: self.resource_client.update(None) except ValueError as e: self.assertTrue(\"Resource\"", "self.fail(\"Expected Exception was not raised\") def test_build_uri_with_id_should_work(self): input = '09USE7335NW35'", "io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result) def test_transform_list_to_dict(self): list =", "self.resource_client.upload(filepath, uri) 
mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_not_wait_for_task_when_response_is_not_task(self,", "\"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\":", "\"a name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY,", "@mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v300(self, mock_patch): request_body = [{ 'op': 'replace',", "mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection,", "subresource=\"/rest/testuri/5/sub/6\", path=\"sub\", uri=\"/rest/testuri/5/sub/6\"), dict( resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\", path=\"sub\", uri=\"/rest/testuri/7/sub/8\"), dict( resource=None,", "self.response_body response = self.resource_client.update(dict_to_update, uri=uri) self.assertEqual(self.response_body, response) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)", "with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper, \"do_get\") @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_should_update_resource_data(self, mock_do_get,", "mock_post): mock_post.return_value = self.task, {} mock_wait4task.return_value = self.task self.resource_client.create({\"test\": \"test\"},", "= resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) def 
test_should_not_merge_when_default_values_not_defined(self): resource = {'name':", "test_get_by_name_with_result(self, mock_get_by): mock_get_by.return_value = [{\"name\": \"value\"}] response = self.resource_client.get_by_name('Resource Name,')", "mock_get_by): get_by_return_value = [{\"name\": \"testname\", \"uri\": \"/rest/testuri\"}] self.resource_client.data = {\"name\":", "Resource): \"\"\"Stub class to test resource file operations\"\"\" class StubResourceZeroBody(ResourceZeroBodyMixin,", "\"5\"}, {\"id\": \"6\"}]}, {'nextPageUri': None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}]", "self.resource_client.get_by_name(\"Resource Name,\") self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(connection, \"get\") def test_get_by_uri(self,", "mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY, mock.ANY) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def", "members, 'uri': uri } result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri)", "@mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v300(self, mock_patch): mock_patch.return_value = {}, {} resource_client", "'get') def test_get_utilization_by_id_with_defaults(self, mock_get): self.resource_client.get_utilization('09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection,", "'/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id':", "= task_with_output self.resource_client.create_report(\"/rest/path/create-report\") mock_post.assert_called_once_with(\"/rest/path/create-report\", {}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 
'get_completed_task') def", "uri, \"members\": members, \"uri\": uri } result = self.resource_client.get_all() self.assertSequenceEqual(result,", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_patch_return_entity(self, mock_wait4task, mock_patch,", "\"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): uri =", "@mock.patch.object(connection, 'get') def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') except exceptions.HPOneViewUnknownType as", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "collection = self.resource_helper.get_collection() self.assertEqual(len(collection), 2) def test_build_uri_with_id_should_work(self): input = \"09USE7335NW35\"", "file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = True mock_open.return_value", "self.task, {} mock_wait4task.return_value = created_resource result = self.resource_client.create(dict_to_create, -1) self.assertEqual(result,", "{'name': 'resource1', \"type\": \"EnclosureGroupV300\"} resource_client = ResourceClient(self.connection, self.URI) result =", "@mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\"", "{\"id\": \"5\"}, {\"id\": \"6\"}, {\"id\": \"7\"}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, \"get\")", "@mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri =", "is not supported yet 
uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/otherthing' extracted_id = extract_id_from_uri(uri)", "License. # You may obtain a copy of the License", "self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_called_once(self, mock_wait4task,", "\\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\ '&query=name%20NE%20%27WrongName%27' \\ '&sort=name%3Aascending' \\ '&view=%22%7Bview-name%7D%22' \\ '&fields=name%2Cowner%2Cmodified'", "self.assertEqual(uri, option[\"uri\"]) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client._helper.build_subresource_uri(None, \"123456\", \"sub-path\") except exceptions.HPOneViewValueError", "try: self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail(\"Expected", "self.response_body new_resource = self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "{}, {} expected = {\"resource_name\": \"a name\", \"type\": \"anotherType\"} self.resource_client.create(dict_to_create)", "super(ResourceSchemaMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"get\") def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI +", "= \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\"", "self.assertTrue(result) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, 
\"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_force(self, mock_ensure_resource,", "class ResourceClientTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 = 'typeV200' TYPE_V300 =", "self.resource_client.delete(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail()", "def test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {} self.resource_client.patch(\"operation\", \"/field\",", "mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_default_api_version_300(self, mock_post): dict_to_create =", "= {\"members\": [{\"member\": \"member\"}]} result = self.resource_helper.get_all( 1, 500, filter,", "'2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id':", "None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection,", "mock_post): mock_post.return_value = {}, {} new_instance = self.resource_client.create({}) self.assertNotEqual(self.resource_client, new_instance)", "self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):", "def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post): task_output = [ {\"type\": \"FCIssueResponseV2\", \"created\":", "mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'}) def test_delete_dict_invalid_uri(self): dict_to_delete = {\"task\": \"task\", \"uri\":", "@mock.patch.object(connection, 'get') def 
test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = '/rest/testuri?start=0&count=-1' members =", "self.connection._apiVersion = 200 self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\":", "query = \"name NE 'WrongName'\" mock_get.return_value = {\"members\": [{\"member\": \"member\"}]}", "self.resource_client.data = {\"name\": \"testname\"} mock_do_get.return_value = get_by_return_value self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data, get_by_return_value[0])", "mock_post_multipart.assert_called_once_with(uri, filepath, \"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath =", "= connection('127.0.0.1', 300) self.resource_client = StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post\")", "'wait_for_task') def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value", "\"1\"} expected_resource = {\"name\": \"resource2\", \"type\": \"resource\", \"port\": \"1\"} merged_resource", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language':", "= extract_id_from_uri(uri) self.assertEqual(id, extracted_id) def test_extract_id_from_uri_with_extra_slash(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id", "def test_get_collection_with_multiple_filters(self, mock_get): 
mock_get.return_value = {} self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three'])", "task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value = self.task, {}", "\"uri\": uri} expected = {\"resource_data\": \"resource_data\", \"uri\": uri, \"type\": \"typeV300\"}", "exceptions from hpOneView.resources.resource import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin,", "uri='/rest/testuri/13/sub'), ] for option in options: uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'],", "self.TYPE_V200} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def", "Resource): \"\"\"Stub class to test resource schema methods\"\"\" class StubResource(Resource):", "\"EnclosureGroupV200\"}, '300': {\"type\": \"EnclosureGroupV300\"} } expected = {'name': 'resource1', \"type\":", "class ResourceSchemaMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client =", "= self.resource_client.delete(resource) self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\", custom_headers=None) def test_delete_with_empty_dict(self): try: self.resource_client.delete({}) except", "= io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def", "def test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"} uri =", "mock_task, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" dict_info = {\"resource_name\": \"a", "resource=\"/rest/testuri/13\", subresource=None, path=\"/sub/\", 
uri=\"/rest/testuri/13/sub\"), ] for option in options: uri", "= {}, {} self.connection._apiVersion = 200 self.resource_client.patch( '123a53cz', 'replace', '/name',", "@mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\"", "def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\"", "= {\"resource_name\": \"a name\", \"type\": \"anotherType\"} mock_post.return_value = {}, {}", "self.connection._apiVersion = 300 resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers)", "test_extract_id_from_uri_passing_id(self): uri = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155') def", "self).setUp(self.resource_client) @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = \"/rest/testuri/\" filepath", "mock_get): mock_get.return_value = {} self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI + \"/test\") @mock.patch.object(connection, \"get\")", "def test_get_utilization_by_uri_with_defaults(self, mock_get): self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) def test_get_utilization_with_empty(self):", "custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_request_when_uri_is_provided(self, mock_patch): request_body = [{", "= [] with 
self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by):", "def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = '/rest/testuri?start=0&count=-1' members = [{'id': '1'},", "@mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri)", "uri = '{resource_uri}?start=1' \\ '&count=500' \\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\ '&query=name%20NE%20%27WrongName%27' \\", "self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(connection, 'get') def test_get_collection_uri(self, mock_get): mock_get.return_value", "test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"expected\"}, {\"name\": \"not expected\"}] response", "test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\": \"a name\", \"type\": \"anotherType\"} mock_post.return_value", "None, self.response_body resource_client = ResourceClient(self.connection, self.URI) resource_client.update(dict_to_update, uri=uri) mock_put.assert_called_once_with(uri, dict_to_update,", "\"/rest/testuri\" TYPE_V200 = 'typeV200' TYPE_V300 = 'typeV300' DEFAULT_VALUES = {", "def test_update_with_force(self, mock_put, mock_laod_resource): dict_to_update = {\"name\": \"test\"} uri =", "raised\") @mock.patch.object(connection, \"get\") def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", 
\"/rest/testuri?start=3&count=3\",", "\\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day'", "Resource): \"\"\"Stub class to test resoruce zero body methods\"\"\" class", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\"", "= None, self.response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body)", "self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI + \"/test\") @mock.patch.object(connection, \"get\") def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value", "name\", } created_resource = { \"resource_id\": \"123\", \"resource_name\": \"a name\",", "expected = {\"resource_data\": \"resource_data\", \"uri\": uri, \"type\": \"typeV300\"} mock_update.return_value =", "def test_build_uri_with_id_should_work(self): input = '09USE7335NW35' expected_output = '/rest/testuri/09USE7335NW35' result =", "'300': {\"type\": \"EnclosureGroupV300\"} } expected = {'name': 'resource1', \"type\": \"EnclosureGroupV300\"}", "= {\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict,", "mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value =", "= {\"name\": \"test\", \"type\": self.TYPE_V300, \"uri\": uri} self.resource_client._merge_default_values() self.resource_client.update(dict_to_update) 
mock_put.assert_called_once_with(uri,", "{\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.connection._apiVersion", "merge_resources, merge_default_values, unavailable_method) class StubResourceFileHandler(ResourceFileHandlerMixin, Resource): \"\"\"Stub class to test", "self.response_body) @mock.patch.object(connection, 'put') def test_update_with_uri_called_once(self, mock_put): dict_to_update = {\"name\": \"test\"}", "self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1',", "mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"replace\", \"/name\", \"new_name\")", "self.assertEqual(result_list, expected_list) def test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method) class FakeResource(object): def __init__(self,", "'&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_id_with_defaults(self, mock_get): self.resource_client.get_utilization('09USE7335NW3')", "'4'}, {'id': '5'}, {'id': '6'}]}, {'nextPageUri': None, 'members': [{'id': '7'},", "uri='/rest/testuri/3/sub/4'), dict( resource='5', subresource='/rest/testuri/5/sub/6', path='sub', uri='/rest/testuri/5/sub/6'), dict( resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8', path='sub',", "= \"/rest/testuri\" TYPE_V200 = \"typeV200\" TYPE_V300 = \"typeV300\" DEFAULT_VALUES =", "None, self.response_body response = self.resource_client.update(dict_to_update, uri=uri) self.assertEqual(self.response_body, response) mock_put.assert_called_once_with(uri, dict_to_update,", "@mock.patch.object(connection, 'get') def test_get_utilization_with_args(self, mock_get): 
self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z', refresh=True, view='day')", "mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_with_multiple_filters(self, mock_get): self.resource_client.get_utilization( '09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z',", "mock_open.return_value = fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY)", "'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value = self.task,", "self.resource_client.get_all(count=15) expected_items = [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}, {\"id\":", "def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value =", "\"value\"}, {\"key\": \"value\"}]} self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI) @mock.patch.object(connection, \"get\") def test_get_collection_with_filter(self, mock_get):", "mock_get, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "= self.task filter = \"name='Exchange Server'\" uri = \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter,", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value =", "'replace', '/name', 'new_name', 60) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', 
request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection,", "resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( '/rest/testuri/id', dict_info, custom_headers={'Extra': 'extra', 'Content-Type': 'application/json-patch+json'})", "= ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\":", "custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") def test_create_should_return_new_resource_instance(self, mock_post):", "self.resource_client.get_by_id(\"123\") self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_collection_uri(self, mock_get): mock_get.return_value =", "self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None)", "\"type\": self.TYPE_V300, \"uri\": uri} self.resource_client._merge_default_values() self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(Resource,", "'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch')", "call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list =", "default_type) expected_list = [ 
{\"name\": \"resource1\", \"type\": \"type1\"}, {\"name\": \"resource2\",", "{} expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1)", "mock_patch.assert_called_once_with( \"/rest/testuri\", request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def", "result = self.resource_client.create(dict_to_create, -1) self.assertEqual(result, created_resource) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task')", "] for option in options: uri = self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"], option[\"path\"])", "as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_update_with_empty_dict(self): try:", "self.assertEqual(len(collection), 2) def test_build_uri_with_id_should_work(self): input = \"09USE7335NW35\" expected_output = \"/rest/testuri/09USE7335NW35\"", "'8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]", "connection(self.host, 300) self.resource_client = ResourceClient(self.connection, self.URI) self.task = {\"task\": \"task\",", "'get') def test_get_by_id_uri(self, mock_get): self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(ResourceClient, 'get_by')", "dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_custom_headers(self, mock_put): dict_to_update = {\"name\":", "mock_post): mock_post.return_value = None, self.response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1)", "result = self.resource_client.download(uri, file_path) self.assertFalse(result) def test_transform_list_to_dict(self): list 
= ['one',", "to test resource patch operations\"\"\" class StubResourceUtilization(ResourceUtilizationMixin, Resource): \"\"\"Stub class", "expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_should_not_override_resource_properties(self, mock_put): dict_to_update = {\"name\":", "mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body() mock_post.assert_called_once_with(", "\"/rest/enclosures\" result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, self.response_body) class ResourcePatchMixinTest(BaseTest):", "custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') def test_update_with_force(self, mock_put):", "@mock.patch.object(connection, \"get\") def test_get_collection_with_path(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI", "self.task mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result,", "{}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post):", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except exceptions.HPOneViewUnknownType", "mock_update.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, 
\"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "dict_to_update = { \"resource_name\": \"a name\", \"uri\": \"a_uri\", } mock_put.return_value", "'post') def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body result =", "self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body() mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection,", "= io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result) class ResourceZeroBodyMixinTest(BaseTest): def", "test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {} self.connection._apiVersion = 200", "{\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body expected_dict", "Resource): \"\"\"Stub class to test resource utilization methods\"\"\" class StubResourceSchema(ResourceSchemaMixin,", "mock_patch.return_value = {}, {} self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY,", "def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI,", "self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") self.assertEqual(self.resource_client.data, entity)", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "None, 'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all()", "self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, 
custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task')", "merged_resource = merge_resources(resource1, resource2) self.assertEqual(merged_resource, expected_resource) def test_merge_default_values(self): default_type =", "'wait_for_task') def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body = {\"resource_name\": \"name\"} mock_post.return_value", "= self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client.build_uri(None) except ValueError", "\"FCIssueResponseV2\", \"created\": \"2015-03-24T15: 32: 50.889Z\"}, {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-13T14: 10:", "+ \"/test\") @mock.patch.object(connection, \"get\") def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {}", "DEFAULT_VALUES = { \"200\": {\"type\": TYPE_V200}, \"300\": {\"type\": TYPE_V300} }", "self.fail(\"Expected Exception was not raised\") def test_build_subresource_uri(self): options = [", "try: self.resource_client.create_report(\"/rest/path/create-report\") except exceptions.HPOneViewException as exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0]) else: self.fail(\"Expected", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI", "def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client._helper.build_subresource_uri(None, \"123456\", \"sub-path\") except exceptions.HPOneViewValueError as exception:", "test_update_without_default_values(self, mock_put, 
mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"name\": \"test\"}", "self.resource_client.data = [] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper, \"do_get\") def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get):", "= '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true'", "Exception was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(\"/rest/\") except exceptions.HPOneViewUnknownType", "dict_to_update = {\"name\": \"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, custom_headers=self.custom_headers)", "= {\"type\": \"type1\"} resource1 = {\"name\": \"resource1\"} resource2 = {\"name\":", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception was not raised\")", "mock_wait4task.return_value = response_body new_resource = self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(connection, \"post\")", "custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\": \"a", "\"wait_for_task\") def test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"}", "{'id': '6'}, {'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def 
test_get_all_should_stop_requests_when_requested_count_reached(self,", "mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") @mock.patch.object(connection, 'get') def test_get_by_id_uri(self, mock_get):", "mock_post.return_value = self.task, self.task mock_wait4task.return_value = response_body result = self.resource_client.create_with_zero_body(", "{\"resource_name\": \"a name\", \"force\": \"yes\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create,", "\"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}]}, {\"nextPageUri\": uri_list[2], \"members\": [{\"id\": \"4\"},", "def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {} self.connection._apiVersion =", "else: self.fail() @mock.patch.object(connection, 'delete') def test_delete_with_dict_uri(self, mock_delete): resource = {\"uri\":", "self.URI self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client._merge_default_values() self.task", "custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post):", "mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_uri_called_once(self, mock_put,", "Name,') @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response", "'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_autofix(self, mock_get_all): mock_get_all.return_value =", "None, 
\"members\": [{\"id\": \"7\"}]}] mock_get.side_effect = results result = self.resource_client.get_all()", "required by applicable law or agreed to in writing, software", "mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"})", "mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "mock_post_multipart): fake_response_body = mock.Mock() uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this", "result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri =", "try: self.resource_client._helper.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected", "URI = \"/rest/testuri\" TYPE_V200 = \"typeV200\" TYPE_V300 = \"typeV300\" DEFAULT_VALUES", "= {\"resource_data\": \"resource_data\", \"uri\": uri} expected = {\"resource_data\": \"resource_data\", \"uri\":", "\"type\": \"anotherType\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, dict_to_create,", "= self.task, {} mock_wait4task.return_value = created_resource result = self.resource_client.create(dict_to_create, -1)", "self.fail() def test_update_with_empty_dict(self): try: self.resource_client.update({}) except 
ValueError as e: self.assertTrue(\"Resource\"", "timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v200(self, mock_patch): request_body =", "filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, fake_response_body result = self.resource_client.upload(filepath,", "test_extract_id_from_uri_with_extra_slash(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '') def", "[call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_should_return_all_items_when_response_paginated(self, mock_get):", "self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = \"/rest/testuri?start=0&count=-1\"", "fake_associated_resurce = mock.Mock() uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "headers = {'Content-Type': 'application/json', 'Extra': 'extra'} self.connection._apiVersion = 300 resource_client", "{}, {} expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create,", "\"uri\": \"/rest/testuri\"}] self.resource_client.data = {\"name\": \"testname\"} mock_do_get.return_value = get_by_return_value self.resource_client.ensure_resource_data(update_data=True)", "@mock.patch.object(connection, \"post\") def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body new_resource", "def test_create_report_should_raise_exception_when_not_task(self, mock_post): task_with_output = self.task.copy() 
task_with_output['taskOutput'] = [] mock_post.return_value", "# This example is not supported yet uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/otherthing'", "= ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) def test_merge_api_default_values(self):", "mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file =", "def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for this resource\" uri", "\"a name\"} mock_post.return_value = {}, {} self.connection._apiVersion = 200 expected_dict", "agreed to in writing, software # distributed under the License", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_patch_return_entity(self, mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"}", "[{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {'nextPageUri': None, \"members\": [{\"id\":", "\"/rest/test\"} self.resource_client.update(data={\"name\": \"test\"}) mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once() def test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data = []", "mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource,", "= {\"resource_name\": \"updated name\"} mock_do_get.return_value = updated_data self.resource_client.refresh() self.assertEqual(self.resource_client.data, updated_data)", "distributed under the License is distributed on an \"AS IS\"", "{\"type\": TYPE_V300} } def setUp(self, resource_client=None): self.resource_client = resource_client self.resource_client.URI", 
"mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.delete('1', custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY,", "def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\": \"a name\", \"type\": \"anotherType\"}", "'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_patch_return_entity(self, mock_wait4task, mock_patch): entity = {\"resource_id\":", "test_get_with_uri_should_work(self, mock_get): mock_get.return_value = {} uri = self.URI + \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\"", "mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v300(self, mock_patch): mock_patch.return_value", "def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by): self.resource_client.data = {\"name\": \"testname\"} mock_get_by.return_value = []", "resoruce zero body methods\"\"\" class StubResourcePatch(ResourcePatchMixin, Resource): \"\"\"Stub class to", "mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization()", "\"/rest/testuri/09USE7335NW3\" expected_output = \"/rest/testuri/09USE7335NW3\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def", "mock_get.assert_called_once_with(self.URI) @mock.patch.object(connection, \"get\") def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=\"name=name\")", "sort, view, 'name,owner,modified', scope_uris=scope_uris) uri = '{resource_uri}?start=1' \\ '&count=500' \\", "mock_open, mock_download_to_stream): 
file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value =", "filter = \"'name'='OneViewSDK \\\"Test FC Network'\" sort = 'name:ascending' query", "self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_api_version_200(self, mock_post):", "@mock.patch.object(ResourceHelper, \"do_get\") @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_should_update_resource_data(self, mock_do_get, mock_get_by): get_by_return_value =", "= self.resource_client.get_all() expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'},", "def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\": []} result", "resource=None, subresource=\"/rest/testuri/9/sub/10\", path=\"sub\", uri=\"/rest/testuri/9/sub/10\"), dict( resource=\"/rest/testuri/11\", subresource=\"12\", path=\"/sub/\", uri=\"/rest/testuri/11/sub/12\"), dict(", "@mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\"", "\"resource_data\", \"uri\": uri, \"type\": \"typeV300\"} mock_update.return_value = self.task, self.response_body mock_wait4task.return_value", "else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(", "= [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_should_return_all_items_when_response_paginated(self,", "'delete') def test_delete_with_dict_uri(self, mock_delete): resource = {\"uri\": \"uri\"} 
mock_delete.return_value =", "\"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value = self.task,", "not raised\") def test_build_subresource_uri(self): options = [ dict( resource='1', subresource='2',", "\"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource):", "e.args[0]) else: self.fail('Expected Exception was not raised') @mock.patch.object(connection, 'get') def", "@mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value =", "self.resource_client.data = {\"uri\": \"/uri/test\"} mock_do_get.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True)", "{\"nextPageUri\": None, \"members\": []} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection,", "= {'Extra': 'extra'} self.connection._apiVersion = 300 resource_client = ResourceClient(self.connection, self.URI)", "\\ \"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\": \"member\"}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_defaults(self,", "'put') def test_update_should_not_override_resource_properties(self, mock_put): dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"}", "ResourceClient(self.connection, self.URI) self.task = {\"task\": \"task\", \"taskState\": \"Finished\"} self.response_body =", "\"~/archive.log\" uri = 
'/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file = io.StringIO() mock_open.return_value = fake_file", "self.resource_client.delete('1', custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'}) def test_delete_dict_invalid_uri(self): dict_to_delete = {\"task\":", "= connection('127.0.0.1', 300) self.resource_client = StubResourceFileHandler(self.connection) super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post_multipart_with_response_handling\")", "@mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch): dict_info = {\"resource_name\": \"a", "mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\", timeout=60) mock_get_completed_task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "self.assertEqual(response, {\"name\": \"value\"}) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_without_result(self,", "mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\":", "default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_default_api_version_300(self, mock_post): dict_to_create", "self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value =", "for option in options: uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'], 
option['path']) self.assertEqual(uri,", "self.assertEqual(self.task, delete_task) mock_delete.assert_called_once_with(self.URI + \"/1?force=True\", custom_headers=None) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task')", "self.resource_client.download(uri, file_path) self.assertFalse(result) class ResourceZeroBodyMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1',", "def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock() uri =", "expected_output = '/rest/testuri/09USE7335NW35' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self):", "self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):", "fake_file = io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb')", "= [{\"name\": \"EXpected\"}, {\"name\": \"not expected\"}] response = self.resource_client.get_by('name', 'exPEcted')", "None, self.response_body self.resource_client.update(dict_to_update, custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\")", "test_build_uri_with_id_should_work(self): input = \"09USE7335NW35\" expected_output = \"/rest/testuri/09USE7335NW35\" result = self.resource_client._helper.build_uri(input)", "mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result = self.resource_client.get_all( 1, 500,", "'exPEcted') self.assertEqual(response, [{\"name\": \"EXpected\"}]) 
mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_skip_autofix(self,", "= {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch( '123a53cz', 'replace',", "test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value =", "mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\") mock_post.assert_called_once_with(\"/rest/path/create-report\", {})", "setUp(self, resource_client=None): self.resource_client = resource_client self.resource_client.URI = self.URI self.resource_client.DEFAULT_VALUES =", "uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri)", "'&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get')", "def test_extract_id_from_uri_with_extra_slash(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '')", "self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") @mock.patch.object(connection, 'get') def test_get_by_id_uri(self, mock_get): self.resource_client.get('12345')", "'4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection,", "def test_get_all_with_custom_uri(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri = 
\"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\")", "\"type\": \"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) expected_uri = \"/rest/testuri\"", "{}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post):", "mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_should_not_override_resource_properties(self, mock_put): dict_to_update =", "mock.ANY) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_wait_for_task_when_response_is_task(self, mock_get,", "Name,') self.assertEqual(response, {\"name\": \"value\"}) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(ResourceClient, 'get_by') def", "result = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task')", "\"/rest/testuri?start=0&count=-1\" members = [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}] mock_get.return_value", "\"wait_for_task\") def test_update_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put): response_body = {\"resource_name\": \"name\"}", "\"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file = io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path,", "get_by_return_value[0]) 
@mock.patch.object(Resource, \"get_by\") def test_ensure_resource_without_data_update(self, mock_get_by): mock_get_by.return_value = [] actual_result", "{\"name\": \"test\"} uri = \"/rest/testuri\" expected = {\"name\": \"test\", \"uri\":", "mock_wait4task.return_value = entity self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1) mock_wait4task.assert_called_once_with(self.task,", "else: self.fail() @mock.patch.object(connection, 'get') def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI +", "def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\": None} result", "+ \"/schema\") @mock.patch.object(connection, 'get') def test_get_by_id_uri(self, mock_get): self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI +", "self.fail(\"Expected Exception was not raised\") def test_merge_resources(self): resource1 = {\"name\":", "response = self.resource_client.get_by_name(\"Resource Name,\") self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(connection, \"get\")", "path=\"sub\", uri=\"/rest/testuri/1/sub/2\"), dict( resource=\"/rest/testuri/3\", subresource=\"4\", path=\"sub\", uri=\"/rest/testuri/3/sub/4\"), dict( resource=\"5\", subresource=\"/rest/testuri/5/sub/6\",", "test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "not raised\") def test_build_subresource_uri(self): options = [ dict( resource=\"1\", subresource=\"2\",", "def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" request_body = [{", "\"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = False 
mock_open.return_value = io.StringIO() result = self.resource_client.download(uri,", "{} mock_wait4task.return_value = self.task self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(Resource,", "= {\"name\": \"test\", \"type\": \"anotherType\", \"uri\": uri} self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected,", "\"/rest/testuri\" mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_default_api_version_300(self,", "exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def", "resource1 = {\"name\": \"resource1\", \"type\": \"resource\"} resource2 = {\"name\": \"resource2\",", "'members': [{'id': '7'}]}] mock_get.side_effect = results result = self.resource_client.get_all(count=15) expected_items", "\"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body expected_dict =", "\"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource): mock_patch.return_value", "name\", \"force\": \"yes\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1) expected_uri", "timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, \"put\") def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value =", "self.fail() @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update): mock_update.return_value", "self.assertEqual(uri, option['uri']) def 
test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client.build_subresource_uri(None, \"123456\", 'sub-path') except exceptions.HPOneViewValueError", "not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client.build_uri('') except ValueError as exception:", "\"Finished\"} self.response_body = {\"body\": \"body\"} self.custom_headers = {\"Accept-Language\": \"en_US\"} class", "{\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI) @mock.patch.object(connection, \"get\") def", "self.resource_client._merge_default_values() expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1)", "[ dict( resource=\"1\", subresource=\"2\", path=\"sub\", uri=\"/rest/testuri/1/sub/2\"), dict( resource=\"/rest/testuri/3\", subresource=\"4\", path=\"sub\",", "raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3') except exceptions.HPOneViewUnknownType as exception:", "mock_patch, mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task", "= {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity", "def test_get_by_id_with_result(self, mock_get): self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_by_id_without_result(self, mock_get):", "self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( uri, dict_info, custom_headers={\"Extra\":", "def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = 
None, self.response_body self.resource_client.URI = \"/rest/enclosures\"", "self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( '/rest/testuri', {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "= self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1) self.assertEqual(result, entity) @mock.patch.object(connection,", "mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"} uri = \"/rest/testuri\" mock_put.return_value", "= '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_uri_with_defaults(self, mock_get): self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri", "self.connection._apiVersion = 200 expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V200} self.resource_client.update(dict_to_update,", "mock_wait4task.return_value = self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\":", "= self.task, self.task mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration',", "self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get')", "def test_ensure_resource_should_call_once(self, mock_do_put, mock_ensure_resource): self.resource_client.data = {\"uri\": \"/rest/test\"} self.resource_client.update(data={\"name\": \"test\"})", "\"3\"}, {\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}, {\"id\": \"7\"}] self.assertSequenceEqual(result,", "'typeV300' DEFAULT_VALUES = { '200': {'type': TYPE_V200}, '300': {'type': 
TYPE_V300}", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None,", "mock_wait4task, mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('1',", "= {\"name\": \"test\"} uri = \"/rest/testuri\" mock_put.return_value = None, self.response_body", "'post') def test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri_and_query_string(self,", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\"", "self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail('Expected Exception", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value =", "mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_return_entity(self, mock_wait4task,", "was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: 
self.resource_client.build_uri('') except ValueError as", "mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, \"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection,", "def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" dict_info =", "self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\")", "\"wait_for_task\") def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task", "\"/rest/testuri\"} result = self.resource_client.delete() self.assertTrue(result) @mock.patch.object(connection, 'delete') def test_helper_delete_all_should_return_true(self, mock_delete):", "'300': {'type': TYPE_V300} } def setUp(self): super(ResourceClientTest, self).setUp() self.host =", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource):", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_args(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\",", "mock_get.return_value = {} self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI + \"/test\") @mock.patch.object(connection, \"get\") def", "as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception was not raised\")", "= 200 
self.resource_client._merge_default_values() expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V200}", "test_get_by_id_uri(self, mock_get): self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_with_result(self,", "Name,') self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(connection, 'get') def test_get_collection_uri(self, mock_get):", "= \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_uri_with_defaults(self, mock_get,", "response_body result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, \"put\")", "expected = {\"name\": \"test\", \"uri\": uri, \"type\": \"typeV300\"} mock_put.return_value =", "= 'typeV300' DEFAULT_VALUES = { '200': {'type': TYPE_V200}, '300': {'type':", "created_resource = { \"resource_id\": \"123\", \"resource_name\": \"a name\", } mock_post.return_value", "200 self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})", "self.assertEqual(extracted_id, '') def test_extract_id_from_uri_passing_id(self): uri = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri)", "{}, {} self.connection._apiVersion = 200 self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name',", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client.build_uri('/rest/') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI,", "self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self):", "\"?filter=name%3Dname\") @mock.patch.object(connection, \"get\") def test_get_collection_with_path(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(path=\"/test\")", "mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_different_resource_uri_should_fail(self,", "NE 'WrongName'\" mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result = self.resource_helper.get_all(", "self.task self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor,", "the License is distributed on an \"AS IS\" BASIS, #", "\"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\")", "+ \"/schema\") class ResourceTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300)", "self.assertFalse(result) def test_transform_list_to_dict(self): list = ['one', 'two', {'tree': 3}, 'four',", "to test resource file operations\"\"\" class StubResourceZeroBody(ResourceZeroBodyMixin, Resource): \"\"\"Stub class", "mock_get, 
mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "[\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"},", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\",", "'post') def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\": \"a name\", \"type\":", "def test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update =", "self.fail() def test_get_by_with_name_none(self): try: self.resource_client.get_by(None, None) except ValueError as e:", "self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(result, dict_to_update) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once(self,", "test_patch_request_custom_headers(self, mock_task, mock_patch): dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value =", "self.resource_client.create_report(\"/rest/path/create-report\") except exceptions.HPOneViewException as exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0]) else: self.fail(\"Expected Exception", "'four', 5] dict_transformed = transform_list_to_dict(list=list) self.assertEqual(dict_transformed, {'5': True, 'four': True,", "mock_wait4task, mock_update, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"resource_data\": \"resource_data\",", "result = self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1) self.assertEqual(result, entity)", "\\ 
'&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def", "{\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect", "custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\": \"a", "\"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart):", "io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_open_file(self,", "\"uri\": uri} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) self.assertEqual(self.response_body, self.resource_client.data) mock_put.assert_called_once_with(uri,", "def test_refresh(self, mock_do_get): updated_data = {\"resource_name\": \"updated name\"} mock_do_get.return_value =", "mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def", "io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def", "'wait_for_task') def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put): response_body = {\"resource_name\": \"name\"} mock_put.return_value", 
"custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_without_default_values(self, mock_put, mock_ensure_resource): uri", "test_build_subresource_uri(self): options = [ dict( resource=\"1\", subresource=\"2\", path=\"sub\", uri=\"/rest/testuri/1/sub/2\"), dict(", "\"resource\", \"port\": \"1\"} merged_resource = merge_resources(resource1, resource2) self.assertEqual(merged_resource, expected_resource) def", "\"{resource_uri}?start=1\" \\ \"&count=500\" \\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\ \"&query=name%20NE%20%27WrongName%27\" \\ \"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\":", "updated_data) @mock.patch.object(connection, \"post\") def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\": \"a", "+ \"/1?force=True\", custom_headers=None) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_with_custom_headers(self, mock_wait4task,", "mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value =", "law or agreed to in writing, software # distributed under", "test_delete_dict_invalid_uri(self): dict_to_delete = {\"task\": \"task\", \"uri\": \"\"} try: self.resource_client.delete(dict_to_delete, False,", "dict_info, custom_headers=headers) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers(self, mock_task, mock_patch):", "expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_uri_with_defaults(self,", "resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) def 
test_should_not_merge_when_default_values_not_defined(self): resource = {'name': 'resource1'}", "\"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\")", "{\"task\": \"task\", \"taskState\": \"Finished\"} self.response_body = {\"body\": \"body\"} self.custom_headers =", "'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={}) @mock.patch.object(connection, 'patch')", "{\"members\": [{\"member\": \"member\"}]} result = self.resource_helper.get_all( 1, 500, filter, query,", "else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, \"get\") def test_get_all_should_do_multi_requests_when_response_paginated(self,", "mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task filter = \"name='Exchange", "mock_do_get.return_value = updated_data self.resource_client.refresh() self.assertEqual(self.resource_client.data, updated_data) @mock.patch.object(connection, \"post\") def test_create_uri(self,", "except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail(\"Expected Exception was", "= self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value", "test_update_return_entity(self, mock_wait4task, mock_put): dict_to_update = { \"resource_name\": \"a name\", \"uri\":", "response_body) @mock.patch.object(connection, \"put\") def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body", "try: self.resource_client._helper.build_uri(\"/rest/\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, 
exception.args[0]) else: self.fail(\"Expected", "def test_merge_api_default_values(self): resource = {'name': 'resource1'} default_values = { '200':", "'5'}, {'id': '6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}]}] mock_get.side_effect =", "= {} self.resource_client.get_collection('12345', 'name=name') mock_get.assert_called_once_with(self.URI + \"/12345?filter=name%3Dname\") @mock.patch.object(connection, 'get') def", "= {\"resource_name\": \"a name\", \"uri\": uri} mock_put.return_value = self.task, {}", "-1) except exceptions.HPOneViewUnknownType as e: self.assertEqual(\"Unknown object type\", e.args[0]) else:", "custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def", "'en_US'}) def test_delete_dict_invalid_uri(self): dict_to_delete = {\"task\": \"task\", \"uri\": \"\"} try:", "def test_get_utilization_with_empty(self): try: self.resource_client.get_utilization('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])", "dict( resource='/rest/testuri/13', subresource=None, path='/sub/', uri='/rest/testuri/13/sub'), ] for option in options:", "test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri':", "\"members\": [{\"id\": \"7\"}]}] mock_get.side_effect = results result = self.resource_client.get_all(count=15) expected_items", "= self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get):", "### import io import unittest import mock from mock import", "{\"id\": \"3\"}] 
mock_get.return_value = { \"nextPageUri\": uri, \"members\": members, \"uri\":", "mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task filter =", "test_get_utilization_with_empty(self): try: self.resource_client.get_utilization('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else:", "\"/rest/testuri/09USE7335NW3\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client._helper.build_uri(None)", "mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"name\": \"test\", \"type\":", "= StubResourceSchema(self.connection) super(ResourceSchemaMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"get\") def test_get_schema_uri(self, mock_get): self.resource_client.get_schema()", "mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock() uri = '/rest/testuri/' filepath", "{\"name\": \"test\", \"type\": self.TYPE_V200} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)", "= 200 self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with(uri, request_body, custom_headers={}) @mock.patch.object(Resource, \"ensure_resource_data\")", "\\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_multiple_filters(self, mock_get,", "try: fake_resource.get_fake(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected", "{\"name\": \"testname\"} mock_do_get.return_value = get_by_return_value 
self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data, get_by_return_value[0]) @mock.patch.object(Resource, \"get_by\")", "file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = False mock_open.return_value", "\"members\": [{\"id\": \"7\"}]}] mock_get.side_effect = results result = self.resource_client.get_all() expected_items", "\"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body response =", "this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try: self.resource_client.get(uri) except exceptions.HPOneViewUnknownType as", "[{\"key\": \"value\"}, {\"key\": \"value\"}]} collection = self.resource_client.get_collection('12345') self.assertEqual(len(collection), 2) @mock.patch.object(ResourceClient,", "may obtain a copy of the License at # #", "self.task, self.task mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1)", "= {\"name\": \"test\"} uri = \"/rest/testuri\" expected = {\"name\": \"test\",", "{}, {} headers = {'Content-Type': 'application/json', 'Extra': 'extra'} self.connection._apiVersion =", "resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( '/rest/testuri/id', dict_info,", "ResourceFileHandlerMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceFileHandler(self.connection)", "'get_by') def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response = self.resource_client.get_by_name('Resource", "self.resource_client.get(None) except ValueError as e: self.assertTrue(\"id\" in e.args[0]) else: self.fail()", "= {\"body\": \"body\"} 
self.custom_headers = {'Accept-Language': 'en_US'} @mock.patch.object(connection, 'get') def", "self.resource_client = StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\":", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_return_entity(self, mock_wait4task, mock_post): dict_to_create =", "@mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task,", "= None, self.response_body new_resource = self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\")", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"}", "'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch): entity", "= {'name': 'resource1'} default_values = {} expected = {'name': 'resource1'}", "500, filter, query, sort) uri = \"{resource_uri}?start=1\" \\ \"&count=500\" \\", "'7'}]}] mock_get.side_effect = results result = self.resource_client.get_all(count=15) expected_items = [{'id':", "name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 
\"post\")", "self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY, mock.ANY) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\")", "{\"key\": \"value\"}]} collection = self.resource_helper.get_collection() self.assertEqual(len(collection), 2) def test_build_uri_with_id_should_work(self): input", "\"EnclosureGroupV300\"} } expected = {'name': 'resource1', \"type\": \"EnclosureGroupV300\"} resource_client =", "self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client._merge_default_values() self.task = {\"task\": \"task\", \"taskState\":", "expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def", "mock_patch.assert_called_once_with( uri, dict_info, custom_headers={\"Extra\": \"extra\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "= self.resource_client.download(uri, file_path) self.assertFalse(result) def test_transform_list_to_dict(self): list = ['one', 'two',", "'patch') def test_patch_with_custom_headers_v300(self, mock_patch): mock_patch.return_value = {}, {} resource_client =", "mock_delete, mock_ensure_resource): mock_delete.return_value = None, self.response_body self.resource_client.data = {\"uri\": \"/rest/testuri\"}", "'/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection,", "uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try: self.resource_client.get(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message,", "may not use this file except 
in compliance with the", "operations\"\"\" class StubResourceZeroBody(ResourceZeroBodyMixin, Resource): \"\"\"Stub class to test resoruce zero", "\"port\": \"1\"} merged_resource = merge_resources(resource1, resource2) self.assertEqual(merged_resource, expected_resource) def test_merge_default_values(self):", "self.resource_client.delete(dict_to_delete, False, -1) except exceptions.HPOneViewUnknownType as e: self.assertEqual(\"Unknown object type\",", "self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") self.assertEqual(self.resource_client.data, entity) @mock.patch.object(Resource,", "options = [ dict( resource=\"1\", subresource=\"2\", path=\"sub\", uri=\"/rest/testuri/1/sub/2\"), dict( resource=\"/rest/testuri/3\",", "def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI,", "\"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body resource_client =", "self.resource_client.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception", "this file except in compliance with the License. 
# You", "= self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, \"put\") def test_update_with_zero_body_without_task(self,", "= self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path']) self.assertEqual(uri, option['uri']) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client.build_subresource_uri(None,", "setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourcePatch(self.connection) super(ResourcePatchMixinTest, self).setUp(self.resource_client)", "{\"uri\": \"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'},", "self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_called_once(self,", "nextPageUri is returned by OneView. 
\"\"\" uri_list = ['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3',", "1, 500, filter, query, sort) uri = \"{resource_uri}?start=1\" \\ \"&count=500\"", "mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) self.assertEqual(self.response_body, self.resource_client.data) mock_put.assert_called_once_with(uri, expected, custom_headers=None)", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource):", "\"patch\") def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" request_body =", "uri = self.URI + \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri) mock_get.assert_called_once_with(uri) def test_get_with_uri_with_incompatible_url_shoud_fail(self): message", "self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_create_with_empty_dict(self): try: self.resource_client.create({}) except", "mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_force(self, mock_put,", "tests.test_utils import mock_builtin from hpOneView.connection import connection from hpOneView import", "dict_to_create, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\":", "path='/sub/', uri='/rest/testuri/13/sub'), ] for option in options: uri = self.resource_client.build_subresource_uri(option['resource'],", "test_build_uri_with_uri_should_work(self): input = \"/rest/testuri/09USE7335NW3\" expected_output = \"/rest/testuri/09USE7335NW3\" result = self.resource_client._helper.build_uri(input)", "# # Licensed under the Apache License, Version 2.0 (the", "members, \"uri\": uri } result = 
self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri)", "name\"} mock_patch.return_value = {}, {} headers = {'Content-Type': 'application/json', 'Extra':", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "= self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value = None, {} try:", "def test_extract_id_from_uri_unsupported(self): # This example is not supported yet uri", "uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{\"nextPageUri\": uri_list[1], \"members\":", "self.resource_client.create(dict_to_create, -1) self.assertEqual(result, created_resource) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_create(self,", "call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list", "= self.task self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put')", "\\ '&sort=name%3Aascending' \\ '&view=%22%7Bview-name%7D%22' \\ '&fields=name%2Cowner%2Cmodified' \\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member': 'member'}],", "ResourceZeroBodyMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceZeroBody(self.connection)", "\"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, dict_to_update,", "= self.task 
self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'patch') def", "self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={})", "mock_post): dict_to_create = {\"resource_name\": \"a name\", \"type\": \"anotherType\"} mock_post.return_value =", "= self.resource_client.update(dict_to_update, uri=uri) self.assertEqual(self.response_body, response) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put')", "Exception was not raised\") def test_merge_resources(self): resource1 = {\"name\": \"resource1\",", "}] mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch( '123a53cz',", "['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'},", "mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once() def test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data = [] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper,", "{\"nextPageUri\": None, \"members\": None} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(ResourceHelper,", "mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_and_custom_headers(self,", "None, self.response_body self.connection._apiVersion = 200 expected_dict = {\"name\": \"test\", \"type\":", "ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, 
RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED,", "resource1 = {\"name\": \"resource1\"} resource2 = {\"name\": \"resource2\"} result_list =", "\"wait_for_task\") def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value", "def test_get_by_name_with_result(self, mock_get_by): mock_get_by.return_value = [{\"name\": \"value\"}] response = self.resource_client.get_by_name('Resource", "@mock.patch.object(connection, \"put\") def test_update_without_default_values(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update", "= created_resource result = self.resource_client.create(dict_to_create, -1) self.assertEqual(result, created_resource) @mock.patch.object(connection, 'post')", "\"anotherType\"} uri = \"/rest/testuri\" mock_put.return_value = None, self.response_body expected =", "self.response_body expected = {\"name\": \"test\", \"type\": \"anotherType\", \"uri\": uri} self.resource_client.update(dict_to_update)", "timeout=-1) self.assertTrue(result) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete):", "mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"} uri =", "\"type\": self.TYPE_V300} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put')", "self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\")", "self.resource_client.get_collection(None) except ValueError as e: self.assertTrue(\"id\" in e.args[0]) else: self.fail()", "= self.task, {} 
mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\", timeout=60) mock_get_completed_task.assert_called_once_with(self.task, 60)", "\"a name\", \"type\": \"anotherType\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES)", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_return_entity(self, mock_wait4task, mock_put): dict_to_update = { \"resource_name\":", "\"uri\": uri, \"type\": \"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri,", "mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, 'post_multipart_with_response_handling')", "test_update_without_default_values(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value", "result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_response_has_no_items(self,", "uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\")", "None, self.response_body self.resource_client.update(dict_to_update) expected_uri = \"/rest/testuri\" mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None) @mock.patch.object(Resource,", "None, self.response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) 
@mock.patch.object(connection,", "test_get_by_property(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_autofix(self,", "= \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY, mock.ANY)", "@mock.patch.object(connection, 'put') def test_update_with_uri_called_once(self, mock_put): dict_to_update = {\"name\": \"test\"} uri", "@mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {'nextPageUri': None, 'members':", "filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri,", "def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value =", "{\"resource_name\": \"a name\"} mock_patch.return_value = {}, {} headers = {'Extra':", "[{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection,", "was not raised\") def test_build_uri_with_id_should_work(self): input = '09USE7335NW35' expected_output =", "def test_create_with_api_version_200(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value =", "\"/rest/testuri\"} self.resource_client._merge_default_values() self.task = {\"task\": \"task\", \"taskState\": \"Finished\"} self.response_body =", "Exception was not raised\") @mock.patch.object(connection, \"get\") def test_get_all_should_do_multi_requests_when_response_paginated(self, 
mock_get): uri_list", "raised') @mock.patch.object(connection, 'get') def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3',", "mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_update.assert_called_once_with(", "= { \"resource_name\": \"a name\", \"uri\": \"a_uri\", } mock_put.return_value =", "e: self.assertTrue(\"id\" in e.args[0]) else: self.fail() def test_get_collection_with_none(self): try: self.resource_client.get_collection(None)", "@mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_force(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value", "task_output) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post): task_with_output", "name\"} mock_do_get.return_value = updated_data self.resource_client.refresh() self.assertEqual(self.resource_client.data, updated_data) @mock.patch.object(connection, \"post\") def", "mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri, force=True) expected_uri = \"/rest/resource/test?force=True\"", "@mock.patch.object(connection, 'get') def test_get_utilization_by_id_with_defaults(self, mock_get): self.resource_client.get_utilization('09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri)", "as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail(\"Expected Exception was not raised\")", "dict_to_update = {\"name\": \"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, 
uri=\"/path\",", "test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data = [] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper, \"do_get\") def test_ensure_resource_raise_resource_not_found_exception_with_uri(self,", "def test_delete_with_empty_dict(self): try: self.resource_client.delete({}) except ValueError as e: self.assertTrue(\"Resource\" in", "not raised\") @mock.patch.object(connection, \"get\") def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\",", "self.response_body mock_wait4task.return_value = self.task self.resource_client.delete('1', custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'}) def", "\"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task,", "\"/rest/testuri\" dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value = {}, {}", "request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_request_when_uri_is_provided(self, mock_patch): request_body =", "\"type1\"} ] self.assertEqual(result_list, expected_list) def test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method) class FakeResource(object):", "= \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file = io.StringIO() mock_open.return_value =", "test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method) class FakeResource(object): def __init__(self, con): self._connection =", "@mock.patch.object(connection, 'post') 
@mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post): task_with_output =", "or implied. # See the License for the specific language", "merge_default_values, unavailable_method) class StubResourceFileHandler(ResourceFileHandlerMixin, Resource): \"\"\"Stub class to test resource", "'name2=two', 'name=three']) mock_get.assert_called_once_with(self.URI + \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, 'get') def test_get_collection_should_return_list(self, mock_get):", "= \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\")", "ResourceClient(self.connection, self.URI) resource_client.update(dict_to_update, uri=uri) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor,", "task_with_output['taskOutput'] = task_output mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output", "'get') def test_get_utilization_with_multiple_filters(self, mock_get): self.resource_client.get_utilization( '09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'], refresh=True,", "= \"name:ascending\" query = \"name NE 'WrongName'\" mock_get.return_value = {\"members\":", "def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource,", "self.fail(\"Expected Exception was not raised\") 
@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def", "mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\")", "'patch') def test_patch_request_when_uri_is_provided(self, mock_patch): request_body = [{ 'op': 'replace', 'path':", "300 resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id', dict_info,", "members = [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}] mock_get.return_value =", "= [{\"name\": \"expected\"}, {\"name\": \"not expected\"}] response = self.resource_client.get_by('connection.name', 'expected')", "extract_id_from_uri(uri) self.assertEqual(id, extracted_id) def test_extract_id_from_uri_with_extra_slash(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id =", "None) @mock.patch.object(ResourceHelper, \"do_put\") @mock.patch.object(Resource, \"ensure_resource_data\") def test_ensure_resource_should_call_once(self, mock_do_put, mock_ensure_resource): self.resource_client.data", "\"testname\", \"uri\": \"/rest/testuri\"}] self.resource_client.data = {\"name\": \"testname\"} mock_do_get.return_value = get_by_return_value", "\"uri\": uri} mock_put.return_value = self.task, {} mock_wait4task.return_value = dict_to_update self.resource_client.update(dict_to_update,", "'&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_with_multiple_filters(self, mock_get): self.resource_client.get_utilization(", "else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client.build_uri('')", "def 
test_update_with_custom_headers(self, mock_put): dict_to_update = {\"name\": \"test\"} mock_put.return_value = None,", "= {} self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three']) mock_get.assert_called_once_with(self.URI + \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection,", "mock_get): self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_by_id_without_result(self, mock_get): mock_get.return_value =", "'/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'put') def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value", "test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"] results = [{\"nextPageUri\":", "self.resource_client.patch(\"replace\", \"/name\", \"new_name\") self.assertEqual(self.resource_client.data, entity) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor,", "= {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI,", "= self.resource_client.get_all( 1, 500, filter, query, sort, view, 'name,owner,modified', scope_uris=scope_uris)", "for this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try: self.resource_client.get(uri) except exceptions.HPOneViewUnknownType", "self.resource_client.get_utilization( '09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'], refresh=True, view='day') expected_uri = 
'/rest/testuri/09USE7335NW3/utilization'", "mock_get.return_value = {'nextPageUri': None, 'members': []} result = self.resource_client.get_all() self.assertEqual(result,", "= None, self.response_body filter = \"name='Exchange Server'\" result = self.resource_client.delete_all(filter=filter,", "filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"], refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\", "\\ '&fields=name%2Cowner%2Cmodified' \\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member': 'member'}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get')", "'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_with_custom_headers(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task,", "name\"} mock_patch.return_value = {}, {} headers = {'Extra': 'extra'} self.connection._apiVersion", "test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_args(self, mock_get,", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_force(self, mock_ensure_resource, mock_delete,", "resource = {\"uri\": \"uri\"} mock_delete.return_value = {}, {} delete_result =", "= {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI) 
@mock.patch.object(connection, \"get\")", "response = self.resource_client.get_by('connection.name', 'expected') self.assertEqual(response, [{'name': 'expected'}, {'name': 'not expected'}])", "\"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource): entity =", "mock_get): self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_with_result(self, mock_get_by):", "= connection('127.0.0.1', 300) self.resource_client = StubResource(self.connection) super(ResourceTest, self).setUp(self.resource_client) self.resource_helper =", "\"get\") def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\":", "@mock.patch.object(connection, 'get') def test_get_all_with_custom_uri(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri)", "mock_ensure_resource): uri = \"/rest/testuri\" request_body = [{ \"op\": \"replace\", \"path\":", "\"get_by\") def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response = self.resource_client.get_by_name(\"Resource", "= self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_wait4task.assert_called_once_with(self.task,", "mock_ensure_resource): uri = \"/rest/testuri\" dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value", "was not raised') @mock.patch.object(connection, 'get') def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list =", "self.assertEqual(self.resource_client.data, entity) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, 
\"get_completed_task\") def test_patch_request_custom_headers_with_content_type(self,", "def test_get_by_property(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def", "raised\") @mock.patch.object(connection, 'post') def test_create_when_the_resource_is_a_list(self, mock_post): dict_to_create = [{\"resource_name\": \"a", "= self.task, {} mock_wait4task.return_value = dict_to_update self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(self.resource_client.data, dict_to_update)", "= \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources')", "mock_wait4task.assert_called_once_with(self.task, mock.ANY) class ResourceUtilizationMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300)", "self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, \"get\") def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get):", "@mock.patch.object(connection, \"get\") def test_get_by_uri(self, mock_get): self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection, \"get\") def", "= get_by_return_value self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data, get_by_return_value[0]) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_without_data_update(self, mock_get_by):", "{'id': '2'}, {'id': '3'}] mock_get.return_value = { 'nextPageUri': uri, 'members':", "\\\"Test FC Network'\" sort = 'name:ascending' query = \"name NE", "uri = 
\"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try:", "'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce =", "def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\", \"name=three\"]) mock_get.assert_called_once_with(self.URI", "= [{ \"op\": \"replace\", \"path\": \"/name\", \"value\": \"new_name\", }] mock_patch.return_value", "-1) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri", "def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]}", "\"\"} try: self.resource_client.delete(dict_to_delete, False, -1) except exceptions.HPOneViewUnknownType as e: self.assertEqual(\"Unknown", "= None, self.response_body self.resource_client.update(dict_to_update, custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource,", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, fake_response_body result = self.resource_client.upload(filepath, uri) self.assertEqual(result,", "'/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'patch') def", "custom_headers=None) @mock.patch.object(connection, 'post') def 
test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\": \"a", "= {\"name\": \"testname\"} mock_do_get.return_value = get_by_return_value self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data, get_by_return_value[0]) @mock.patch.object(Resource,", "custom_headers=None) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_with_custom_headers(self, mock_wait4task, mock_delete): mock_delete.return_value", "uri=\"/rest/testuri/9/sub/10\"), dict( resource=\"/rest/testuri/11\", subresource=\"12\", path=\"/sub/\", uri=\"/rest/testuri/11/sub/12\"), dict( resource=\"/rest/testuri/13\", subresource=None, path=\"/sub/\",", "\"'name'='OneViewSDK \\\"Test FC Network'\" sort = \"name:ascending\" query = \"name", "uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri,", "\\\"Test FC Network'\" sort = \"name:ascending\" query = \"name NE", "{} self.resource_client.get_collection('12345', 'name=name') mock_get.assert_called_once_with(self.URI + \"/12345?filter=name%3Dname\") @mock.patch.object(connection, 'get') def test_get_collection_with_multiple_filters(self,", "= None, self.response_body self.resource_client.update(dict_to_update, uri=\"/path\", custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})", "except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception was", "self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client._helper.build_uri(None) except ValueError as", "OneView. 
\"\"\" uri_list = ['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri':", "mock_patch.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch('/rest/testuri/123', 'operation',", "\"get\") def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI +", "name\"} mock_post.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create,", "raised\") def test_build_uri_with_id_should_work(self): input = '09USE7335NW35' expected_output = '/rest/testuri/09USE7335NW35' result", "mock_patch.assert_called_once_with( '/rest/testuri/id', dict_info, custom_headers={'Extra': 'extra', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor,", "= '/rest/testuri/09USE7335NW35' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input", "= \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) class ResourceSchemaMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1',", "def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results =", "self.assertEqual('Unrecognized URI for this resource', e.args[0]) else: self.fail() @mock.patch.object(connection, 'put')", "\"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_custom_headers(self, mock_ensure_resource, mock_delete, mock_wait4task):", "uri, \"type\": \"typeV300\"} mock_update.return_value = self.task, 
self.response_body mock_wait4task.return_value = self.task", "ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) def test_should_not_merge_when_default_values_not_defined(self):", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put,", "self.response_body) class ResourcePatchMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client", "mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor,", "dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value = {}, {} headers", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_custom_headers(self, mock_ensure_resource, mock_delete,", "mock_update, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"resource_data\": \"resource_data\", \"uri\":", "self.resource_client.get_utilization('09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_uri_with_defaults(self, mock_get):", "{\"resource_name\": \"updated name\"} mock_do_get.return_value = updated_data self.resource_client.refresh() self.assertEqual(self.resource_client.data, updated_data) @mock.patch.object(connection,", "{}, {} self.connection._apiVersion = 200 expected_dict = {\"resource_name\": \"a name\",", "mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) 
@mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\":", "mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v300(self, mock_patch): mock_patch.return_value =", "\"type1\"} resource1 = {\"name\": \"resource1\"} resource2 = {\"name\": \"resource2\"} result_list", "\"name='Exchange Server'\" delete_task = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_wait4task.assert_called_with(self.task, timeout=-1) self.assertEqual(self.task,", "'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch): entity = {\"resource_id\":", "self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "updated_data self.resource_client.refresh() self.assertEqual(self.resource_client.data, updated_data) @mock.patch.object(connection, \"post\") def test_create_uri(self, mock_post): dict_to_create", "timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_default_api_version_300(self, mock_post): dict_to_create", "\"member\"}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri", "300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( uri, dict_info, custom_headers={\"Extra\": \"extra\", \"Content-Type\":", 
"filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\"", "self.response_body self.connection._apiVersion = 200 expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V200}", "mock_get.assert_called_once_with(uri) def test_get_with_uri_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for this resource\"", "mock_ensure_resource): self.resource_client.data = {\"uri\": \"/rest/test\"} self.resource_client.update(data={\"name\": \"test\"}) mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once() def", "= \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = True mock_open.return_value =", "\"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, \"get\") def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\": [{\"key\":", "\\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\ \"&query=name%20NE%20%27WrongName%27\" \\ \"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\": \"member\"}], result) mock_get.assert_called_once_with(uri)", "body methods\"\"\" class StubResourcePatch(ResourcePatchMixin, Resource): \"\"\"Stub class to test resource", "test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value =", "mock_get_by): mock_get_by.return_value = [{\"name\": \"value\"}] response = self.resource_client.get_by_name('Resource Name,') self.assertEqual(response,", "'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US', 'Content-Type': 
'application/json-patch+json'})", "test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def", "dict( resource=\"/rest/testuri/3\", subresource=\"4\", path=\"sub\", uri=\"/rest/testuri/3/sub/4\"), dict( resource=\"5\", subresource=\"/rest/testuri/5/sub/6\", path=\"sub\", uri=\"/rest/testuri/5/sub/6\"),", "= ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members': [{'id':", "mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file = io.StringIO()", "'wait_for_task') def test_update_return_entity(self, mock_wait4task, mock_put): dict_to_update = { \"resource_name\": \"a", "'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body", "def test_merge_resources(self): resource1 = {\"name\": \"resource1\", \"type\": \"resource\"} resource2 =", "mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, 'download_to_stream')", "test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put): response_body = {\"resource_name\": \"name\"} mock_put.return_value = self.task,", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_uri(self, mock_wait4task, mock_update): dict_to_update = {\"resource_data\": \"resource_data\",", "uri='/rest/testuri/5/sub/6'), dict( resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8', path='sub', uri='/rest/testuri/7/sub/8'), dict( resource=None, subresource='/rest/testuri/9/sub/10', path='sub',", "}] mock_patch.return_value = {}, {} 
self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with( \"/rest/testuri\",", "= con self._client = ResourceClient(con, \"/rest/fake/resource\") def get_fake(self, uri): return", "= {\"uri\": \"/rest/testuri\"} self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\")", "self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_uri_with_force(self, mock_post):", "custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v200(self, mock_patch): mock_patch.return_value = {},", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"})", "mock_get.return_value = [] response = self.resource_client.get_by_id(\"123\") self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\")", "uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources')", "by OneView. 
\"\"\" uri_list = [\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results =", "dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": \"a_uri\"} mock_update.return_value = self.task, self.response_body", "mock_delete.return_value = {}, {} delete_result = self.resource_client.delete(resource) self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\", custom_headers=None)", "timeout=-1) mock_wait4task.assert_called_with(self.task, timeout=-1) self.assertEqual(self.task, delete_task) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "class ResourceTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client =", "e.args[0]) else: self.fail() @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_called_once(self, mock_wait4task,", "{}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post): task_with_output", "{\"resource_name\": \"a name\", \"type\": \"anotherType\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create,", "def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\"", "self.response_body mock_wait4task.return_value = self.task update_task = self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, update_task)", "'Extra': 'extra'} self.connection._apiVersion = 300 resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id',", "try: self.resource_client.delete({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else:", "'&sort=name%3Aascending' \\ 
'&view=%22%7Bview-name%7D%22' \\ '&fields=name%2Cowner%2Cmodified' \\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member': 'member'}], result)", "= self.task self.resource_client.create_with_zero_body() mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor,", "def test_update_without_default_values(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\"", "def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value =", "{\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body resource_client", "to test resource utilization methods\"\"\" class StubResourceSchema(ResourceSchemaMixin, Resource): \"\"\"Stub class", "mock_put): response_body = {\"resource_name\": \"name\"} mock_put.return_value = self.task, self.task mock_wait4task.return_value", "resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US', 'Content-Type':", "def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value =", "{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]}, {'nextPageUri':", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\",", "mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.URI =", 
"mock_wait4task.return_value = self.task self.resource_client.URI = \"/rest/enclosures\" self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\",", "mock_wait4task.assert_called_with(self.task, timeout=-1) self.assertEqual(self.task, delete_task) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_by_id_called_once(self,", "timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_custom_headers(self, mock_post): dict_to_create", "= {\"resource_name\": \"a name\", \"force\": \"yes\"} mock_post.return_value = {}, {}", "resource = {'name': 'resource1'} default_values = { '200': {\"type\": \"EnclosureGroupV200\"},", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body = {\"resource_name\": \"name\"}", "= \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.connection._apiVersion = 200 expected_dict", "\"\"\" uri_list = ['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1],", "[call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get):", "[{\"name\": \"value\"}] response = self.resource_client.get_by_name('Resource Name,') self.assertEqual(response, {\"name\": \"value\"}) mock_get_by.assert_called_once_with(\"name\",", "exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail(\"Expected Exception was not", "in options: uri = 
self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"], option[\"path\"]) self.assertEqual(uri, option[\"uri\"]) def", "mock_wait4task.assert_called_once_with(self.task, mock.ANY) def test_delete_with_none(self): try: self.resource_client.delete(None) except ValueError as e:", "\"put\") def test_update_with_force(self, mock_put, mock_laod_resource): dict_to_update = {\"name\": \"test\"} uri", "result = self.resource_client.get_all(count=15) expected_items = [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\":", "else: self.fail() def test_update_with_none(self): try: self.resource_client.update(None) except ValueError as e:", "{\"id\": \"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}]}] mock_get.side_effect", "mock_ensure_resource): self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"], refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\"", "mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_return_entity(self, mock_wait4task,", "@mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_custom_headers(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value", "uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, fake_response_body", "\"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): response_body =", "\"get\") def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" 
mock_get.assert_called_once_with(expected_uri)", "\"\"\"Stub class to test resource patch operations\"\"\" class StubResourceUtilization(ResourceUtilizationMixin, Resource):", "self.response_body self.resource_client.update(dict_to_update, uri=\"/path\", custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put')", "300) self.resource_client = StubResource(self.connection) super(ResourceTest, self).setUp(self.resource_client) self.resource_helper = ResourceHelper(self.URI, self.connection,", "'get') def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock() uri", "= [{\"resource_name\": \"a name\"}] mock_post.return_value = {}, {} resource_client =", "def test_should_not_merge_when_default_values_not_defined(self): resource = {'name': 'resource1'} default_values = {} expected", "file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value = io.StringIO() self.resource_client.download(uri,", "uri=\"/rest/testuri/5/sub/6\"), dict( resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\", path=\"sub\", uri=\"/rest/testuri/7/sub/8\"), dict( resource=None, subresource=\"/rest/testuri/9/sub/10\", path=\"sub\",", "ResourceClient(con, \"/rest/fake/resource\") def get_fake(self, uri): return self._client.get(uri) class ResourceClientTest(unittest.TestCase): URI", "= 200 self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"})", "dict( resource='1', subresource='2', path='sub', uri='/rest/testuri/1/sub/2'), dict( resource='/rest/testuri/3', subresource='4', path='sub', uri='/rest/testuri/3/sub/4'),", "get_fake(self, uri): return self._client.get(uri) class 
ResourceClientTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200", "'/rest/testuri/123a53cz', 'replace', '/name', 'new_name', 60) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'})", "\"wait_for_task\") def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value", "mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})", "}] mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"replace\", \"/name\",", "mock_get, mock_ensure_resource): self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\", "Name,\") self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(connection, \"get\") def test_get_by_uri(self, mock_get):", "e.args[0]) else: self.fail() def test_create_with_none(self): try: self.resource_client.create(None) except ValueError as", "self.assertEqual(result, fake_response_body) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path", "= {}, {} expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V300}", "setUp(self): super(ResourceClientTest, self).setUp() self.host = '127.0.0.1' self.connection = connection(self.host, 300)", "mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 
'get') def test_get_all_with_custom_uri(self,", "TYPE_V300 = \"typeV300\" DEFAULT_VALUES = { \"200\": {\"type\": TYPE_V200}, \"300\":", "mock_get): uri = \"/rest/testuri?start=0&count=-1\" members = [{\"id\": \"1\"}, {\"id\": \"2\"},", "\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {},", "dict( resource='/rest/testuri/11', subresource='12', path='/sub/', uri='/rest/testuri/11/sub/12'), dict( resource='/rest/testuri/13', subresource=None, path='/sub/', uri='/rest/testuri/13/sub'),", "\"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_id_with_defaults(self,", "'wait_for_task') def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value = self.task, {} mock_wait4task.return_value", "utf-8 -*- ### # (C) Copyright [2019] Hewlett Packard Enterprise", "def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath =", "'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart):", "mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value = io.StringIO()", "\"a name\"} mock_patch.return_value = {}, {} headers = {\"Content-Type\": \"application/json\",", "\"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\", 
filter=[\"startDate=2016-05-30T03:29:42.361Z\",", "def test_get_all_with_custom_uri(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get')", "mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} collection = self.resource_client.get_collection('12345')", "@mock.patch.object(connection, \"put\") def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update", "self.URI) resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US',", "test_delete_all_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task", "sort = \"name:ascending\" query = \"name NE 'WrongName'\" mock_get.return_value =", "'/rest/testuri/09USE7335NW35' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input =", "\\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "raised\") @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post): task_with_output", "mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri = '/rest/testuri/' filepath =", "expected) @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = '/rest/testuri/' 
filepath", "mock_wait4task.not_been_called() @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get,", "expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_without_default_values(self, mock_put, mock_ensure_resource):", "expected_output = \"/rest/testuri/09USE7335NW35\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self):", "\"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity result =", "\"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers)", "def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body result = self.resource_client.update_with_zero_body(", "mock_post.return_value = None, self.response_body new_resource = self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(Resource,", "= 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( uri, dict_info, custom_headers={\"Extra\": \"extra\",", "mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value = io.StringIO()", "\"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value = self.task,", "test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource): uri = 
\"/rest/testuri\" dict_info = {\"resource_name\":", "self.resource_client.update(data={\"name\": \"test\"}) mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once() def test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data = [] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers,", "{\"name\": \"resource2\"} result_list = merge_default_values([resource1, resource2], default_type) expected_list = [", "{\"name\": \"value\"}) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_without_result(self, mock_get_by):", "@mock.patch.object(ResourceClient, 'get_all') def test_get_by_property(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient,", "self.response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'put')", "mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value =", "test_should_not_merge_when_default_values_not_defined(self): resource = {'name': 'resource1'} default_values = {} expected =", "self.assertEqual(dict_transformed, {'5': True, 'four': True, 'one': True, 'tree': 3, 'two':", "mock_put): dict_to_update = { \"resource_name\": \"a name\", \"uri\": \"a_uri\", }", "expected_uri = \"/rest/testuri\" mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "in writing, software # distributed under the License is distributed", "ResourceClient(self.connection, self.URI) resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', 
custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language':", "mock.Mock() mock_wait4task.return_value = fake_associated_resurce result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce)", "= self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(result, dict_to_update) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection,", "mock_ensure_resource): mock_patch.return_value = {}, {} self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY,", "= \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file = io.StringIO() mock_open.return_value =", "\"uri\": uri, \"type\": \"typeV300\"} mock_update.return_value = self.task, self.response_body mock_wait4task.return_value =", "\"test\", \"type\": self.TYPE_V300, \"uri\": uri} self.resource_client._merge_default_values() self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource): dict_to_update =", "\"6\"}, {\"id\": \"7\"}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get):", "custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource):", "test_create_with_api_version_200(self, mock_post): 
dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {},", "mock_update.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.update(dict_to_update, False) self.assertEqual(self.task,", "{'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results", "self.resource_client.delete('1', force=True, timeout=-1) self.assertEqual(self.task, delete_task) mock_delete.assert_called_once_with(self.URI + \"/1?force=True\", custom_headers=None) @mock.patch.object(connection,", "300) self.resource_client = StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_custom_headers(self, mock_put, mock_ensure_resource): dict_to_update", "response = self.resource_client.get_by_name('Resource Name,') self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(connection, 'get')", "70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={}) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v300(self, mock_patch):", "resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type':", "expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V200} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri,", "self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self):", "= self.task, self.task 
mock_wait4task.return_value = response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration',", "'&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_with_multiple_filters(self,", "{} self.connection._apiVersion = 200 self.resource_client._merge_default_values() expected_dict = {\"resource_name\": \"a name\",", "mock_post.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1)", "\"put\") def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\", \"type\":", "uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file = io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri,", "= \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = False mock_open.return_value =", "{} mock_wait4task.return_value = created_resource result = self.resource_client.create(dict_to_create, -1) self.assertEqual(result, created_resource)", "mock_put): mock_put.return_value = None, self.response_body self.resource_client.URI = \"/rest/enclosures\" result =", "mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_return_entity(self, mock_wait4task,", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task,", "self.task self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": 
\"en_US\"}) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "= None, {} try: self.resource_client.create_report(\"/rest/path/create-report\") except exceptions.HPOneViewException as exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED,", "self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})", "@mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = \"/rest/testuri?start=0&count=-1\" members =", "\"anotherType\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)", "{'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}]", "uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path", "test_get_by_property_with__invalid_uri(self, mock_get_all): try: self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/other/5435534/sub') except exceptions.HPOneViewUnknownType as e:", "\"resource2\", \"type\": \"type1\"} ] self.assertEqual(result_list, expected_list) def test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method)", "'get_completed_task') def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] =", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', 
'/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1],", "dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} self.connection._apiVersion", "= self.task, self.task mock_wait4task.return_value = entity result = self.resource_client.patch( '123a53cz',", "License, Version 2.0 (the \"License\"); # you may not use", "= self.task, {} mock_wait4task.return_value = dict_to_update result = self.resource_client.update(dict_to_update, timeout=-1)", "None, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update):", "StubResourcePatch(self.connection) super(ResourcePatchMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v200(self, mock_patch,", "mock_post.return_value = {}, {} new_instance = self.resource_client.create({}) self.assertNotEqual(self.resource_client, new_instance) @mock.patch.object(connection,", "filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY,", "self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "'2'}, {'id': '3'}] mock_get.return_value = { 'nextPageUri': uri, 'members': members,", "self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=15\",", 
"self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") def", "uri=\"/rest/testuri/11/sub/12\"), dict( resource=\"/rest/testuri/13\", subresource=None, path=\"/sub/\", uri=\"/rest/testuri/13/sub\"), ] for option in", "self.assertEqual(result, []) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_called_once(self, mock_wait4task, mock_delete):", "mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch( '123a53cz', 'replace',", "filter = \"name='Exchange Server'\" uri = \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter, force=True, timeout=-1)", "'&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_with_multiple_filters(self, mock_get): self.resource_client.get_utilization( '09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower',", "@mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post): task_output = [ {\"type\":", "'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/'", "= dict_to_update result = self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(result, dict_to_update) @mock.patch.object(connection, 'post')", "\"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri", "mock_wait4task.return_value = self.task 
self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\")", "filepath, \"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1],", "\"/rest/test/another/resource/uri/09USE7335NW3\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception", "{} self.connection._apiVersion = 200 expected_dict = {\"resource_name\": \"a name\", \"type\":", "self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self):", "test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri':", "self.assertTrue(\"field\" in e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def test_get_with_uri_should_work(self, mock_get):", "self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client._helper.build_uri(None) except ValueError as exception:", "response_body) @mock.patch.object(connection, 'post') def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body", "300) self.resource_client = StubResourceFileHandler(self.connection) super(ResourceFileHandlerMixinTest, 
self).setUp(self.resource_client) @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart(self,", "\"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri =", "= results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls)", "self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, \"get\")", "'get') def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', 'name=name') mock_get.assert_called_once_with(self.URI", "'post_multipart_with_response_handling') def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource):", "self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail(\"Expected Exception", "the License for the specific language governing permissions and #", "mock_patch, mock_ensure_resource): request_body = [{ \"op\": \"replace\", \"path\": \"/name\", \"value\":", "'/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result = self.resource_client.get_all( 1,", "custom_headers={}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def 
test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource): request_body", "\"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def", "= \"{resource_uri}?start=1\" \\ \"&count=500\" \\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\ \"&query=name%20NE%20%27WrongName%27\" \\ \"&sort=name%3Aascending\".format(resource_uri=self.URI)", "uri='/rest/testuri/7/sub/8'), dict( resource=None, subresource='/rest/testuri/9/sub/10', path='sub', uri='/rest/testuri/9/sub/10'), dict( resource='/rest/testuri/11', subresource='12', path='/sub/',", "= self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"}", "def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK \\\"Test FC Network'\" sort", "'members': members, 'uri': uri } result = self.resource_client.get_all() self.assertSequenceEqual(result, members)", "mock.ANY, mock.ANY) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_wait_for_task_when_response_is_task(self,", "mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( '/rest/testuri', {}, custom_headers=None) @mock.patch.object(connection, 'post')", "= response_body result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection,", "mock_get_completed_task.return_value = task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, 
task_output) @mock.patch.object(connection, 'post')", "= \"name='Exchange Server'\" delete_task = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_wait4task.assert_called_with(self.task, timeout=-1)", "mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\")", "@mock.patch.object(connection, \"put\") def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\",", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "= connection('127.0.0.1', 300) self.resource_client = StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\")", "= {}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers)", "def test_update_with_force(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\"", "@mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\":", "self.task, self.task mock_wait4task.return_value = response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1)", "\"anotherType\"} mock_post.return_value = {}, {} expected = {\"resource_name\": \"a name\",", "test_patch_with_custom_headers_v300(self, mock_patch): mock_patch.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI)", "= \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = True mock_open.return_value =", "name\"} 
mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)", "provides a maximum number of results to be returned but", "uri = self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"], option[\"path\"]) self.assertEqual(uri, option[\"uri\"]) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try:", "\"EnclosureGroupV300\"} resource_client = ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result,", "mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task", "self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, self.resource_client.data) mock_update.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "= {'Content-Type': 'application/json', 'Extra': 'extra'} self.connection._apiVersion = 300 resource_client =", "{} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\") mock_post.assert_called_once_with(\"/rest/path/create-report\", {}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "self.task mock_wait4task.return_value = entity self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1)", "= results result = self.resource_client.get_all() expected_items = [{'id': '1'}, {'id':", "resource_client = ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection,", "{} headers = {\"Extra\": \"extra\"} self.connection._apiVersion = 300 
self.resource_client.patch_request(uri, body=dict_info,", "'path': '/name', 'value': 'new_name', }] mock_patch.return_value = {}, {} resource_client", "io.StringIO() result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def", "self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream):", "self.assertEqual(id, extracted_id) def test_extract_id_from_uri_with_extra_slash(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id = extract_id_from_uri(uri)", "mock_patch.return_value = {}, {} self.resource_client.patch( '/rest/testuri/123a53cz', 'replace', '/name', 'new_name', 60)", "resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8', path='sub', uri='/rest/testuri/7/sub/8'), dict( resource=None, subresource='/rest/testuri/9/sub/10', path='sub', uri='/rest/testuri/9/sub/10'), dict(", "raised\") def test_build_subresource_uri(self): options = [ dict( resource='1', subresource='2', path='sub',", "and # limitations under the License. 
### import io import", "test_get_utilization_by_id_with_defaults(self, mock_get): self.resource_client.get_utilization('09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def", "@mock.patch.object(ResourceHelper, \"do_put\") @mock.patch.object(Resource, \"ensure_resource_data\") def test_ensure_resource_should_call_once(self, mock_do_put, mock_ensure_resource): self.resource_client.data =", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\":", "mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI)", "self.resource_client.delete(resource) self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\", custom_headers=None) def test_delete_with_empty_dict(self): try: self.resource_client.delete({}) except ValueError", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "\"wait_for_task\") def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value", "was not raised\") @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_do_post_request(self, mock_get_completed_task,", "def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {'nextPageUri': None, 'members': None} result", "self.resource_client.update(dict_to_update) expected_uri = \"/rest/testuri\" mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork', 
uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with__invalid_uri(self,", "self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get')", "'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_uri(self, mock_wait4task, mock_update): dict_to_update = {\"resource_data\":", "= {\"uri\": \"/uri/test\"} mock_do_get.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource,", "\"testname\"} mock_do_get.return_value = get_by_return_value self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data, get_by_return_value[0]) @mock.patch.object(Resource, \"get_by\") def", "test_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body filter = \"name='Exchange Server'\"", "expected_list = [ {\"name\": \"resource1\", \"type\": \"type1\"}, {\"name\": \"resource2\", \"type\":", "+ \"?filter=name%3Dname\") @mock.patch.object(connection, \"get\") def test_get_collection_with_path(self, mock_get): mock_get.return_value = {}", "Hewlett Packard Enterprise Development LP # # Licensed under the", "\"/rest/resource/test\" mock_put.return_value = None, self.response_body expected_dict = {\"name\": \"test\", \"type\":", "= {\"name\": \"test\", \"type\": self.TYPE_V200} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict,", "in e.args[0]) else: self.fail() def test_create_with_none(self): try: self.resource_client.create(None) except ValueError", "test_get_by_with_name_none(self): try: 
self.resource_client.get_by(None, None) except ValueError as e: self.assertTrue(\"field\" in", "result = self.resource_client.download(uri, file_path) self.assertFalse(result) class ResourceZeroBodyMixinTest(BaseTest): def setUp(self): self.connection", "mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None,", "\"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v200(self, mock_patch): request_body", "self).setUp(self.resource_client) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value", "subresource='/rest/testuri/7/sub/8', path='sub', uri='/rest/testuri/7/sub/8'), dict( resource=None, subresource='/rest/testuri/9/sub/10', path='sub', uri='/rest/testuri/9/sub/10'), dict( resource='/rest/testuri/11',", "self.resource_client.URI = self.URI self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES self.resource_client.data = {\"uri\": \"/rest/testuri\"}", "name\", \"type\": \"anotherType\"} mock_post.return_value = {}, {} expected = {\"resource_name\":", "[call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_should_return_all_items_when_response_paginated(self, mock_get):", "def test_extract_id_from_uri_passing_id(self): uri = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155')", "try: self.resource_client.build_uri('/rest/') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected", "uri) @mock.patch.object(connection, \"download_to_stream\") 
@mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path =", "self.resource_client) @mock.patch.object(connection, \"post\") def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body", "\"new_name\", }] mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"replace\",", "resource file operations\"\"\" class StubResourceZeroBody(ResourceZeroBodyMixin, Resource): \"\"\"Stub class to test", "None, 'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=15)", "\"/12345\") @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_with_result(self, mock_get_by): mock_get_by.return_value = [{\"name\": \"value\"}]", "mock_wait4task, mock_post): dict_to_create = { \"resource_name\": \"a name\", } created_resource", "uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1], 'members':", "@mock.patch.object(connection, 'get') def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri)", "self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_update_with_empty_dict(self): try: self.resource_client.update({}) except", "mock_get_all.return_value = [{\"name\": \"expected\"}, {\"name\": \"not expected\"}] response = self.resource_client.get_by('connection.name',", "response) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_custom_headers(self, mock_put): dict_to_update", "def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" 
mock_post_multipart.return_value", "dict_to_delete = {\"task\": \"task\", \"uri\": \"\"} try: self.resource_client.delete(dict_to_delete, False, -1)", "subresource=\"12\", path=\"/sub/\", uri=\"/rest/testuri/11/sub/12\"), dict( resource=\"/rest/testuri/13\", subresource=None, path=\"/sub/\", uri=\"/rest/testuri/13/sub\"), ] for", "test_patch_with_custom_headers_v200(self, mock_patch): mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch('/rest/testuri/123',", "in e.args[0]) else: self.fail() def test_update_with_empty_dict(self): try: self.resource_client.update({}) except ValueError", "as exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception was not raised\")", "{\"name\": \"resource2\", \"port\": \"1\"} expected_resource = {\"name\": \"resource2\", \"type\": \"resource\",", "self.response_body = {\"body\": \"body\"} self.custom_headers = {'Accept-Language': 'en_US'} @mock.patch.object(connection, 'get')", "@mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with_uri(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub')", "# distributed under the License is distributed on an \"AS", "'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "\"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}]}] mock_get.side_effect = results result", "self.response_body filter = \"name='Exchange Server'\" result = self.resource_client.delete_all(filter=filter, force=True, timeout=-1)", "['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', 
'/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'},", "Exception was not raised') @mock.patch.object(connection, 'get') def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list", "# Unless required by applicable law or agreed to in", "self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "self.resource_client.patch( '/rest/testuri/123a53cz', 'replace', '/name', 'new_name', 60) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type':", "custom_headers=headers) mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\")", "mock_get): filter = \"'name'='OneViewSDK \\\"Test FC Network'\" sort = 'name:ascending'", "{'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]),", "mock_patch.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch( '123a53cz',", "= '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = False mock_open.return_value = io.StringIO() result =", "self.response_body mock_wait4task.return_value = self.task self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, self.resource_client.data) mock_update.assert_called_once_with(uri, expected,", "\"resource\"} resource2 = {\"name\": \"resource2\", \"port\": \"1\"} expected_resource = {\"name\":", "500, filter, query, sort, view, 'name,owner,modified', scope_uris=scope_uris) uri = '{resource_uri}?start=1'", 
"\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "\\ '&count=500' \\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\ '&query=name%20NE%20%27WrongName%27' \\ '&sort=name%3Aascending' \\ '&view=%22%7Bview-name%7D%22'", "@mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_by_id_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value =", "def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', 'name=name') mock_get.assert_called_once_with(self.URI +", "merge_resources(resource1, resource2) self.assertEqual(merged_resource, expected_resource) def test_merge_default_values(self): default_type = {\"type\": \"type1\"}", "raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\") except exceptions.HPOneViewUnknownType as exception:", "self).setUp(self.resource_client) @mock.patch.object(connection, \"get\") def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\")", "response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'put')", "option in options: uri = self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"], option[\"path\"]) self.assertEqual(uri, option[\"uri\"])", "uri = \"/rest/testuri\" mock_put.return_value = None, self.response_body expected = {\"name\":", "self.resource_client.get(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception", "self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) 
@mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_not_wait_for_task_when_response_is_not_task(self,", "= [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}, {\"id\": \"4\"}, {\"id\":", "query, sort, view, 'name,owner,modified', scope_uris=scope_uris) uri = '{resource_uri}?start=1' \\ '&count=500'", "uri, 'members': members, 'uri': uri } result = self.resource_client.get_all() self.assertSequenceEqual(result,", "resource_client = ResourceClient(self.connection, self.URI) resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70)", "else: self.fail() def test_get_by_with_name_none(self): try: self.resource_client.get_by(None, None) except ValueError as", "custom_headers=headers) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers(self, mock_task, mock_patch): dict_info", "'en_US'}) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put): response_body", "resource='/rest/testuri/3', subresource='4', path='sub', uri='/rest/testuri/3/sub/4'), dict( resource='5', subresource='/rest/testuri/5/sub/6', path='sub', uri='/rest/testuri/5/sub/6'), dict(", "class StubResourceUtilization(ResourceUtilizationMixin, Resource): \"\"\"Stub class to test resource utilization methods\"\"\"", "{}, {} self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def", "mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path)", "'8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, 
'get') def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self,", "= \"typeV300\" DEFAULT_VALUES = { \"200\": {\"type\": TYPE_V200}, \"300\": {\"type\":", "self.task, {} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\", timeout=60) mock_get_completed_task.assert_called_once_with(self.task, 60) @mock.patch.object(connection,", "ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_create_with_empty_dict(self):", "StubResourceUtilization(ResourceUtilizationMixin, Resource): \"\"\"Stub class to test resource utilization methods\"\"\" class", "test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\": None} result =", "{}, {} new_instance = self.resource_client.create({}) self.assertNotEqual(self.resource_client, new_instance) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor,", "{} mock_wait4task.return_value = dict_to_update self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(self.resource_client.data, dict_to_update) @mock.patch.object(Resource, \"get_by\")", "dict( resource='/rest/testuri/3', subresource='4', path='sub', uri='/rest/testuri/3/sub/4'), dict( resource='5', subresource='/rest/testuri/5/sub/6', path='sub', uri='/rest/testuri/5/sub/6'),", "= [] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper, \"do_get\") def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get): self.resource_client.data", "else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client.build_uri('/rest/')", "\"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") 
def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource): request_body", "= \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath,", "= fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY) @mock.patch.object(connection,", "\"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}] mock_get.return_value = { \"nextPageUri\": uri,", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value =", "the Apache License, Version 2.0 (the \"License\"); # you may", "mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def", "dict_to_create = {\"resource_name\": \"a name\", \"type\": \"anotherType\"} mock_post.return_value = {},", "= dict_to_update self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(self.resource_client.data, dict_to_update) @mock.patch.object(Resource, \"get_by\") def test_get_by_name_with_result(self,", "test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\",", "\"1\"} merged_resource = merge_resources(resource1, resource2) self.assertEqual(merged_resource, expected_resource) def 
test_merge_default_values(self): default_type", "self.URI) self.task = {\"task\": \"task\", \"taskState\": \"Finished\"} self.response_body = {\"body\":", "= {\"name\": \"test\", \"type\": \"typeV300\"} self.resource_client.data = {'uri': uri} expected", "self.resource_client.data = {\"uri\": \"/rest/test\"} self.resource_client.update(data={\"name\": \"test\"}) mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once() def test_ensure_resource_raise_unique_identifier_exception(self):", "{\"members\": [{\"member\": \"member\"}]} result = self.resource_client.get_all( 1, 500, filter, query,", "'name=name') mock_get.assert_called_once_with(self.URI + \"/12345?filter=name%3Dname\") @mock.patch.object(connection, 'get') def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY,", "= self.task, self.response_body mock_wait4task.return_value = self.task delete_task = self.resource_client.delete('1', force=True,", "self.fail() def test_create_with_none(self): try: self.resource_client.create(None) except ValueError as e: self.assertTrue(\"Resource\"", "mock_post): response_body = {\"resource_name\": \"name\"} mock_post.return_value = self.task, self.task mock_wait4task.return_value", "= 'typeV200' TYPE_V300 = 'typeV300' DEFAULT_VALUES = { '200': {'type':", "Name,') @mock.patch.object(connection, 'get') def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\":", "mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( '/rest/testuri',", "None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, 
\"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection, \"post_multipart_with_response_handling\") def", "mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY,", "mock_wait4task.return_value = created_resource result = self.resource_client.create(dict_to_create, -1) self.assertEqual(result, created_resource) @mock.patch.object(connection,", "= \"typeV200\" TYPE_V300 = \"typeV300\" DEFAULT_VALUES = { \"200\": {\"type\":", "\"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self,", "\"value\"}]} self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(connection, 'get') def test_get_collection_with_filter(self, mock_get):", "'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_patch_return_entity(self, mock_wait4task, mock_patch): entity", "= None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task')", "'/name', 'value': 'new_name', }] mock_patch.return_value = {}, {} self.connection._apiVersion =", "def test_ensure_resource_without_data_update(self, mock_get_by): mock_get_by.return_value = [] actual_result = self.resource_client.ensure_resource_data(update_data=False) expected_result", "uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri(self, mock_get): 
self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\")", "test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', 'name=name') mock_get.assert_called_once_with(self.URI + \"/12345?filter=name%3Dname\")", "self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\", \"Content-Type\": \"application/json-patch+json\"})", "'sub-path') except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception", "= self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(connection, \"post\") def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value", "mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get):", "mock.ANY) def test_delete_with_none(self): try: self.resource_client.delete(None) except ValueError as e: self.assertTrue(\"Resource\"", "self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_called_once(self, mock_wait4task,", "mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', 'name=name') mock_get.assert_called_once_with(self.URI + \"/12345?filter=name%3Dname\") @mock.patch.object(connection,", "@mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput']", 
"{\"name\": \"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=\"/path\", custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY,", "\"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def", "self.response_body expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300, \"uri\": uri} self.resource_client._merge_default_values()", "mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1)", "'get_all') def test_get_by_with_incorrect_result_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"EXpected\"}, {\"name\": \"not", "'wait_for_task') def test_delete_all_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value", "True, 'four': True, 'one': True, 'tree': 3, 'two': True}) def", "@mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" request_body", "= ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, \"post\")", "None, 'members': None} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'delete')", "utilization methods\"\"\" class StubResourceSchema(ResourceSchemaMixin, Resource): \"\"\"Stub class to test resource", "test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri = 
\"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def", "test_update_with_default_api_version_300(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value", "\"\"\"Stub class to test resoruce zero body methods\"\"\" class StubResourcePatch(ResourcePatchMixin,", "{} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_uri_with_force(self,", "mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = False", "= \"Unrecognized URI for this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" fake_resource", "\"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]", "'&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member': 'member'}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_defaults(self, mock_get):", "= 200 expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V200} self.resource_client.update(dict_to_update, uri=uri,", "mock_get): mock_get.return_value = {} uri = self.URI + \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri)", "Exception was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3') except", "@mock.patch(mock_builtin('open')) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri =", "\"ensure_resource_data\") 
@mock.patch.object(connection, \"get\") def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri =", "{'nextPageUri': None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results", "NE 'WrongName'\" view = '\"{view-name}\"' scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value =", "def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value", "\"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource,", "\"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri)", "= io.StringIO() result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open'))", "= self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once_without_uri(self,", "class StubResourceFileHandler(ResourceFileHandlerMixin, Resource): \"\"\"Stub class to test resource file operations\"\"\"", "= resource_client self.resource_client.URI = self.URI self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES self.resource_client.data =", 
"def setUp(self, resource_client=None): self.resource_client = resource_client self.resource_client.URI = self.URI self.resource_client.DEFAULT_VALUES", "self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client._merge_default_values() self.task =", "expected_uri = \"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_api_version_200(self,", "= '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) def test_get_utilization_with_empty(self): try: self.resource_client.get_utilization('') except ValueError as", "entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value =", "name\"} mock_post.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client._merge_default_values() expected_dict", "{} self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\", \"Content-Type\":", "'1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id':", "{\"resource_data\": \"resource_data\", \"uri\": uri} expected = {\"resource_data\": \"resource_data\", \"uri\": uri,", "\\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_id_with_defaults(self, mock_get,", "def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") class ResourceTest(BaseTest): def", "type\", e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def test_get_schema_uri(self, mock_get): self.resource_client.get_schema()", "mock_get_by.return_value = [] 
with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper, \"do_get\") @mock.patch.object(Resource, \"get_by\")", "self.assertEqual(self.resource_client.data, dict_to_update) @mock.patch.object(Resource, \"get_by\") def test_get_by_name_with_result(self, mock_get_by): self.resource_client.get_by_name(\"Resource Name,\") mock_get_by.assert_called_once_with(\"name\",", "mock_wait4task, mock_put): response_body = {\"resource_name\": \"name\"} mock_put.return_value = self.task, self.task", "{} self.connection._apiVersion = 200 self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with(uri, request_body, custom_headers={})", "self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input = \"/rest/testuri/09USE7335NW3\" expected_output =", "test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def", "resource utilization methods\"\"\" class StubResourceSchema(ResourceSchemaMixin, Resource): \"\"\"Stub class to test", "self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_default_api_version_300(self,", "= {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} self.connection._apiVersion =", "self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor,", "result) def test_build_uri_with_uri_should_work(self): input = 
'/rest/testuri/09USE7335NW3' expected_output = '/rest/testuri/09USE7335NW3' result", "@mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value =", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_force(self, mock_put, mock_laod_resource): dict_to_update =", "mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_without_default_values(self, mock_put): dict_to_update =", "'value': 'new_name', }] mock_patch.return_value = {}, {} self.resource_client.patch( '/rest/testuri/123a53cz', 'replace',", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_custom_headers(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\":", "\"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=\"/path\", custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY,", "self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def", "'new_name', }] mock_patch.return_value = {}, {} self.resource_client.patch( '/rest/testuri/123a53cz', 'replace', '/name',", "test_extract_id_from_uri_unsupported(self): # This example is not supported yet uri =", "self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_custom_headers(self,", "None, self.response_body self.resource_client.data = {\"uri\": \"/rest/testuri\"} result = self.resource_client.delete() self.assertTrue(result)", 
"test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri = '/rest/testuri/' filepath", "file_path) self.assertFalse(result) def test_transform_list_to_dict(self): list = ['one', 'two', {'tree': 3},", "StubResourceZeroBody(ResourceZeroBodyMixin, Resource): \"\"\"Stub class to test resoruce zero body methods\"\"\"", "exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception was not", "test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"} uri = \"/rest/testuri\"", "results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri =", "try: self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/other/5435534/sub') except exceptions.HPOneViewUnknownType as e: self.assertEqual('Unrecognized URI", "-1) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri", "{}, {} self.resource_client.patch( '/rest/testuri/123a53cz', 'replace', '/name', 'new_name', 60) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz',", "options = [ dict( resource='1', subresource='2', path='sub', uri='/rest/testuri/1/sub/2'), dict( resource='/rest/testuri/3',", "def test_delete_with_custom_headers(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value =", "self.assertEqual([{'member': 'member'}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_defaults(self, mock_get): 
self.resource_client.get_all()", "under the License is distributed on an \"AS IS\" BASIS,", "= self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, self.response_body) class ResourcePatchMixinTest(BaseTest): def setUp(self):", "test_get_all_with_custom_uri(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def", "= self.resource_client.get_by_id(\"123\") self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_collection_uri(self, mock_get): mock_get.return_value", "zero body methods\"\"\" class StubResourcePatch(ResourcePatchMixin, Resource): \"\"\"Stub class to test", "test_patch_return_entity(self, mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task,", "\"wait_for_task\") def test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update", "{} headers = {'Extra': 'extra'} self.connection._apiVersion = 300 resource_client =", "self.task filter = \"name='Exchange Server'\" uri = \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter, force=True,", "= \"/rest/resource/test\" mock_put.return_value = None, self.response_body resource_client = ResourceClient(self.connection, self.URI)", "self.fail(\"Expected Exception was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(\"/rest/\") except", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") def test_delete_should_return_true(self, mock_delete, mock_ensure_resource): mock_delete.return_value =", "{} 
expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1,", "was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3') except exceptions.HPOneViewUnknownType", "uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)", "uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"EXpected\"},", "file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value = io.StringIO() self.resource_client.download(uri,", "for option in options: uri = self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"], option[\"path\"]) self.assertEqual(uri,", "mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\",", "mock_put): dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"} uri = \"/rest/resource/test\"", "entity) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch): dict_info", "= self.task, {} mock_get_completed_task.return_value = task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result,", "custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_api_version_200(self, mock_put): dict_to_update = {\"name\": \"test\"}", "test_upload_should_call_post_multipart(self, mock_post_multipart): uri = 
\"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value =", "self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail('Expected Exception was not raised') @mock.patch.object(connection, 'get')", "\\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def", "2) def test_build_uri_with_id_should_work(self): input = \"09USE7335NW35\" expected_output = \"/rest/testuri/09USE7335NW35\" result", "mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"name\": \"test\"} expected", "= {}, {} self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post')", "custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_return_entity(self, mock_wait4task, mock_post):", "@mock.patch.object(Resource, \"get_by\") def test_ensure_resource_without_data_update(self, mock_get_by): mock_get_by.return_value = [] actual_result =", "in e.args[0]) else: self.fail() def test_get_by_with_name_none(self): try: self.resource_client.get_by(None, None) except", "\"resource_name\": \"a name\", } mock_post.return_value = self.task, {} mock_wait4task.return_value =", "self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection,", "timeout=-1) self.assertEqual(self.resource_client.data, dict_to_update) @mock.patch.object(Resource, \"get_by\") def test_get_by_name_with_result(self, mock_get_by): self.resource_client.get_by_name(\"Resource Name,\")", "= self.resource_client.get_by('connection.name', 
'expected') self.assertEqual(response, [{'name': 'expected'}, {'name': 'not expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\",", "= merge_resources(resource1, resource2) self.assertEqual(merged_resource, expected_resource) def test_merge_default_values(self): default_type = {\"type\":", "'/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'post') def test_create_uri(self, mock_post): dict_to_create", "def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = []", "'6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}]}] mock_get.side_effect = results result", "self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper, \"do_get\") def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get): self.resource_client.data = {\"uri\": \"/uri/test\"}", "self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) def test_should_not_merge_when_default_values_not_defined(self): resource", "delete_task) mock_delete.assert_called_once_with(self.URI + \"/1?force=True\", custom_headers=None) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input = '/rest/testuri/09USE7335NW3' expected_output = '/rest/testuri/09USE7335NW3'", "\"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource): entity =", "@mock.patch.object(connection, \"get\") def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI", "not 
raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\") except exceptions.HPOneViewUnknownType as", "mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.URI", "test_ensure_resource_should_update_resource_data(self, mock_do_get, mock_get_by): get_by_return_value = [{\"name\": \"testname\", \"uri\": \"/rest/testuri\"}] self.resource_client.data", "'resource1'} default_values = {} expected = {'name': 'resource1'} resource_client =", "mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create =", "self.resource_client.get_by(None, None) except ValueError as e: self.assertTrue(\"field\" in e.args[0]) else:", "None, self.response_body self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put')", "= {\"name\": \"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=\"/path\", custom_headers=self.custom_headers)", "= \"/rest/testuri\" class BaseTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 = \"typeV200\"", "self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\", custom_headers=None) def test_delete_with_empty_dict(self): try: self.resource_client.delete({}) except ValueError as", "@mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value", "file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri) 
@mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream):", "= {\"name\": \"resource2\", \"port\": \"1\"} expected_resource = {\"name\": \"resource2\", \"type\":", "mock_patch.return_value = {}, {} self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with( \"/rest/testuri\", request_body,", "self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_wait4task.assert_called_once_with(self.task, mock.ANY)", "mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_post.assert_called_once_with(", "\"resource1\"} resource2 = {\"name\": \"resource2\"} result_list = merge_default_values([resource1, resource2], default_type)", "= self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "= self.resource_client.get_by_name('Resource Name,') self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(connection, 'get') def", "= self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, self.resource_client.data)", "mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\":", "test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} 
mock_post.return_value = {},", "self.resource_client.delete({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail()", "= \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path)", "uri=\"/rest/testuri/13/sub\"), ] for option in options: uri = self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"],", "test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\")", "else: self.fail() @mock.patch.object(connection, 'get') def test_get_with_uri_should_work(self, mock_get): mock_get.return_value = {}", "'get') def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath", "uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members':", "self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value =", "= None, fake_response_body result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection,", "= {} expected = {'name': 'resource1'} resource_client = ResourceClient(self.connection, self.URI)", "self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper, \"do_get\") def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get): self.resource_client.data = {\"uri\":", 
"@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_custom_headers(self, mock_put, mock_ensure_resource): dict_to_update =", "force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_uri_with_force(self, mock_post): dict_to_create", "[{\"resource_name\": \"a name\"}] mock_post.return_value = {}, {} resource_client = ResourceClient(self.connection,", "def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'", "\"wait_for_task\") def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task", "'replace', '/name', 'new_name', -1) self.assertEqual(result, entity) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task')", "{\"name\": \"test\"} expected = {\"name\": \"test\", \"uri\": uri, \"type\": \"typeV300\"}", "\"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri =", "name\"} mock_post.return_value = {}, {} self.connection._apiVersion = 200 expected_dict =", "\"member\"}]} result = self.resource_helper.get_all( 1, 500, filter, query, sort) uri", "= {} self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\", \"name=three\"]) mock_get.assert_called_once_with(self.URI + 
\"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, \"get\")", "call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list =", "in options: uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path']) self.assertEqual(uri, option['uri']) def", "self.task, self.task mock_wait4task.return_value = response_body new_resource = self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource, self.resource_client)", "mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v200(self, mock_patch): request_body = [{", "\"123456\", 'sub-path') except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected", "\"op\": \"replace\", \"path\": \"/name\", \"value\": \"new_name\", }] mock_patch.return_value = {},", "self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with( \"/rest/testuri\", request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\")", "'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={}) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v300(self,", "mock_ensure_resource): request_body = [{ \"op\": \"replace\", \"path\": \"/name\", \"value\": \"new_name\",", "result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_response_has_no_items(self,", "new_instance) @mock.patch.object(connection, 
\"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value", "200 self.resource_client._merge_default_values() expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create,", "timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_default_api_version_300(self, mock_post):", "self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/other/5435534/sub') except exceptions.HPOneViewUnknownType as e: self.assertEqual('Unrecognized URI for", "\"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri =", "uri = '/rest/testuri?start=0&count=-1' members = [{'id': '1'}, {'id': '2'}, {'id':", "self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection,", "\"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_api_version_200(self, mock_put): dict_to_update", "'tree': 3, 'two': True}) def test_extract_id_from_uri(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id", "setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceSchema(self.connection) super(ResourceSchemaMixinTest, self).setUp(self.resource_client)", "self.resource_client = StubResourceFileHandler(self.connection) 
super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart(self, mock_post_multipart):", "= self.resource_client.get_by_name('Resource Name,') self.assertEqual(response, {\"name\": \"value\"}) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(ResourceClient,", "\"/rest/enclosures\" self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "'get') def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = '/rest/testuri?start=0&count=-1' members = [{'id':", "= \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = False mock_open.return_value =", "\"\"\"Stub class to test resource utilization methods\"\"\" class StubResourceSchema(ResourceSchemaMixin, Resource):", "{} self.connection._apiVersion = 200 self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY,", "\"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri)", "@mock.patch.object(connection, \"get\") def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri)", "returned but for pagination purposes, a nextPageUri is returned by", "custom_headers=None) @mock.patch.object(connection, 'post') 
@mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value", "@mock.patch.object(connection, \"get\") def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"]", "mock_do_get, mock_get_by): get_by_return_value = [{\"name\": \"testname\", \"uri\": \"/rest/testuri\"}] self.resource_client.data =", "= '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result = self.resource_client.get_all(", "resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" fake_resource = FakeResource(None) try: fake_resource.get_fake(uri) except", "task_output = [ {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-24T15: 32: 50.889Z\"}, {\"type\":", "else: self.fail(\"Expected Exception was not raised\") def test_merge_resources(self): resource1 =", "{\"resource_name\": \"name\"} mock_post.return_value = self.task, self.task mock_wait4task.return_value = response_body new_resource", "self.response_body self.resource_client.update(dict_to_update, custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "\"uri\": uri} self.resource_client._merge_default_values() self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() @mock.patch.object(connection, 'delete') def test_delete_with_dict_uri(self,", "mock.Mock() uri = \"/rest/testuri/\" filepath = 
\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None,", "\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource):", "{}, {} headers = {\"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri,", "{'type': TYPE_V300} } def setUp(self): super(ResourceClientTest, self).setUp() self.host = '127.0.0.1'", "@mock.patch.object(connection, 'put') def test_update_should_not_override_resource_properties(self, mock_put): dict_to_update = {\"name\": \"test\", \"type\":", "custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_return_entity(self, mock_wait4task,", "= entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_wait4task.assert_called_once_with(self.task, mock.ANY) class ResourceUtilizationMixinTest(BaseTest): def", "self.fail(\"Expected Exception was not raised\") def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message = \"Unrecognized", "None, self.response_body self.resource_client.URI = \"/rest/enclosures\" result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1)", "mock_get): self.resource_client.get_utilization('09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_uri_with_defaults(self,", "{} delete_result = self.resource_client.delete(resource) self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\", custom_headers=None) def test_delete_with_empty_dict(self): try:", "mock_post_multipart.return_value = None, mock.Mock() 
self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor,", "{\"name\": \"resource1\", \"type\": \"type1\"}, {\"name\": \"resource2\", \"type\": \"type1\"} ] self.assertEqual(result_list,", "uri) mock_wait4task.not_been_called() @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_return_associated_resource_when_response_is_task(self,", "ANY KIND, either express or implied. # See the License", "self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task')", "def test_update_with_custom_headers(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"} mock_put.return_value =", "uri = \"{resource_uri}?start=1\" \\ \"&count=500\" \\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\ \"&query=name%20NE%20%27WrongName%27\" \\", "\"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection,", "\"name='Exchange Server'\" uri = \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_delete.assert_called_once_with(uri) @mock.patch.object(connection,", "io.StringIO() result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def", "mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", {}, custom_headers=None) 
@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once_without_uri(self,", "'/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v200(self, mock_patch): mock_patch.return_value", "the License. # You may obtain a copy of the", "mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_return_entity(self, mock_wait4task,", "[ {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-24T15: 32: 50.889Z\"}, {\"type\": \"FCIssueResponseV2\", \"created\":", "\"new_name\") self.assertEqual(self.resource_client.data, entity) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def", "uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task,", "def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'", "[] mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\") mock_post.assert_called_once_with(\"/rest/path/create-report\",", "@mock.patch.object(connection, \"get\") def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock()", "e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def 
test_get_with_uri_should_work(self, mock_get): mock_get.return_value =", "\"resource_data\", \"uri\": uri} expected = {\"resource_data\": \"resource_data\", \"uri\": uri, \"type\":", "{}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post):", "test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\":", "test_build_uri_with_none_should_raise_exception(self): try: self.resource_client._helper.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else:", "= {\"nextPageUri\": None, \"members\": None} result = self.resource_client.get_all() self.assertEqual(result, [])", "\"Resource Name,\") @mock.patch.object(Resource, \"get_by\") def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = []", "# See the License for the specific language governing permissions", "\"&refresh=true\" \\ \"&view=day\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_multiple_filters(self,", "60) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") def test_delete_should_return_true(self, mock_delete, mock_ensure_resource): mock_delete.return_value", "test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri':", "= [{\"name\": \"value\"}] response = self.resource_client.get_by_name('Resource Name,') self.assertEqual(response, {\"name\": \"value\"})", "\"a_uri\"} 
mock_update.return_value = self.task, self.response_body mock_wait4task.return_value = self.task update_task =", "self.TYPE_V300, \"uri\": uri} self.resource_client._merge_default_values() self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\")", "not raised\") def test_merge_resources(self): resource1 = {\"name\": \"resource1\", \"type\": \"resource\"}", "{'id': '5'}, {'id': '6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}]}] mock_get.side_effect", "'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post): task_output = [", "expected) def test_should_not_merge_when_default_values_not_defined(self): resource = {'name': 'resource1'} default_values = {}", "@mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = '/rest/testuri/' filepath =", "= '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath,", "dict( resource=None, subresource='/rest/testuri/9/sub/10', path='sub', uri='/rest/testuri/9/sub/10'), dict( resource='/rest/testuri/11', subresource='12', path='/sub/', uri='/rest/testuri/11/sub/12'),", "{\"resource_data\": \"resource_data\", \"uri\": uri, \"type\": \"typeV300\"} mock_update.return_value = self.task, self.response_body", "mock_patch.return_value = {}, {} headers = {'Content-Type': 'application/json', 'Extra': 'extra'}", "@mock.patch.object(connection, 'delete') def test_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body filter", "test_get_by_name_with_result(self, mock_get_by): self.resource_client.get_by_name(\"Resource Name,\") mock_get_by.assert_called_once_with(\"name\", 
\"Resource Name,\") @mock.patch.object(Resource, \"get_by\") def", "mock_ensure_resource): dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"} uri = \"/rest/testuri\"", "{\"nextPageUri\": uri_list[2], \"members\": [{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {'nextPageUri':", "= self.resource_client.create(dict_to_create, -1) self.assertEqual(result, created_resource) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "except ValueError as e: self.assertTrue(\"id\" in e.args[0]) else: self.fail() def", "def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response = self.resource_client.get_by_name('Resource Name,')", "\"uri\": \"a_uri\", } mock_put.return_value = self.task, {} mock_wait4task.return_value = dict_to_update", "mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value =", "\"/name\", \"new_name\") mock_wait4task.assert_called_once_with(self.task, mock.ANY) class ResourceUtilizationMixinTest(BaseTest): def setUp(self): self.connection =", "connection('127.0.0.1', 300) self.resource_client = StubResource(self.connection) super(ResourceTest, self).setUp(self.resource_client) self.resource_helper = ResourceHelper(self.URI,", "{\"id\": \"3\"}, {\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}, {\"id\": \"7\"}]", "self.task, {} mock_wait4task.return_value = dict_to_update self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(self.resource_client.data, dict_to_update) @mock.patch.object(Resource,", "= [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\":", "returned by OneView. 
\"\"\" uri_list = [\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results", "self.resource_client.build_uri('/rest/') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso')", "'09USE7335NW35' expected_output = '/rest/testuri/09USE7335NW35' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def", "as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception was not raised\")", "'1'}, {'id': '2'}, {'id': '3'}]}, {'nextPageUri': uri_list[2], 'members': [{'id': '4'},", "= {}, {} delete_result = self.resource_client.delete(resource) self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\", custom_headers=None) def", "= {}, {} headers = {\"Extra\": \"extra\"} self.connection._apiVersion = 300", "\"put\") def test_update_with_custom_headers(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"} mock_put.return_value", "= None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, \"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection, \"post_multipart_with_response_handling\")", "@mock.patch.object(connection, 'get') def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3']", "{\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} collection = self.resource_client.get_collection('12345') self.assertEqual(len(collection), 2)", "= 
\"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri", "as e: self.assertEqual('Unrecognized URI for this resource', e.args[0]) else: self.fail()", "dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_uri(self, mock_wait4task, mock_update):", "expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_uri(self,", "self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_get_with_none(self): try: self.resource_client.get(None) except", "timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_should_not_override_resource_properties(self, mock_post):", "= self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, 'post_multipart_with_response_handling')", "None, self.response_body new_resource = self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "= \"/rest/resource/test\" mock_put.return_value = None, self.response_body expected_dict = {\"name\": \"test\",", "self.resource_client.update(dict_to_update) 
self.assertEqual(self.response_body, self.resource_client.data) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "nextPageUri is returned by OneView. \"\"\" uri_list = [\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\",", "'&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_id_with_defaults(self, mock_get): self.resource_client.get_utilization('09USE7335NW3') expected_uri =", "@mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {'nextPageUri': None, 'members':", "FC Network'\" sort = \"name:ascending\" query = \"name NE 'WrongName'\"", "mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection, \"get\") def test_get_by_id_with_result(self, mock_get): self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\")", "refresh=True, view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\", "self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_id_should_work(self):", "test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value", "\"type\": \"typeV300\"} mock_update.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.update(dict_to_update,", "Exception was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: 
self.resource_client._helper.build_uri('') except ValueError", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task,", "= self.task delete_task = self.resource_client.delete('1', force=True, timeout=-1) self.assertEqual(self.task, delete_task) mock_delete.assert_called_once_with(self.URI", "\"put\") def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update =", "@mock.patch.object(Resource, \"get_by\") def test_get_by_name_with_result(self, mock_get_by): self.resource_client.get_by_name(\"Resource Name,\") mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\")", "= \"/rest/testuri/09USE7335NW35\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input", "default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_should_not_override_resource_properties(self, mock_put): dict_to_update", "\"a name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create,", "mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_should_not_override_resource_properties(self, mock_put,", "self.assertEqual(result, self.response_body) class ResourcePatchMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300)", "connection('127.0.0.1', 300) self.resource_client = StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") 
@mock.patch.object(connection,", "filter = \"name='Exchange Server'\" result = self.resource_helper.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result)", "mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = True", "@mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body =", "refresh=True, view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\", "test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" dict_info = {\"resource_name\":", "'?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get')", "[call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get):", "custom_headers=None) @mock.patch.object(connection, 'put') def test_update_should_not_override_resource_properties(self, mock_put): dict_to_update = {\"name\": \"test\",", "uri = \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_delete.assert_called_once_with(uri) @mock.patch.object(connection, 'delete') def", "\"get\") def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\", \"name=three\"])", "\"new_name\") 
mock_wait4task.assert_called_once_with(self.task, mock.ANY) class ResourceUtilizationMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1',", "except exceptions.HPOneViewUnknownType as e: self.assertEqual(\"Unknown object type\", e.args[0]) else: self.fail()", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "\"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_patch(self, mock_wait4task,", "= {\"name\": \"resource1\"} resource2 = {\"name\": \"resource2\"} result_list = merge_default_values([resource1,", "= ResourceClient(con, \"/rest/fake/resource\") def get_fake(self, uri): return self._client.get(uri) class ResourceClientTest(unittest.TestCase):", "\"/rest/testuri\" expected = {\"name\": \"test\", \"uri\": uri, \"type\": \"typeV300\"} mock_put.return_value", "as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_get_with_none(self): try:", "uri, dict_info, custom_headers={\"Extra\": \"extra\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\")", "{\"name\": \"not expected\"}] response = self.resource_client.get_by('connection.name', 'expected') self.assertEqual(response, [{'name': 'expected'},", "mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) class ResourceSchemaMixinTest(BaseTest): def setUp(self):", "{\"id\": \"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}, {\"id\":", "mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1],", "writing, software # distributed under the License is distributed on", 
"['name1=one', 'name2=two', 'name=three']) mock_get.assert_called_once_with(self.URI + \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, 'get') def test_get_collection_should_return_list(self,", "mock_get): self.resource_client.get_utilization( '09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'], refresh=True, view='day') expected_uri =", "\"anotherType\"} self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI, expected, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_without_default_values(self, mock_post):", "\"ensure_resource_data\") def test_ensure_resource_should_call_once(self, mock_do_put, mock_ensure_resource): self.resource_client.data = {\"uri\": \"/rest/test\"} self.resource_client.update(data={\"name\":", "test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource): request_body = [{ \"op\": \"replace\", \"path\": \"/name\",", "def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value =", "mock_patch): mock_patch.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch('/rest/testuri/123',", "unittest import mock from mock import call from tests.test_utils import", "custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put):", "default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create", "= {\"name\": \"test\"} expected = {\"name\": \"test\", \"uri\": uri, 
\"type\":", "{\"uri\": \"/rest/testuri\"} self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor,", "@mock.patch.object(Resource, \"get_by\") def test_ensure_resource_should_update_resource_data(self, mock_do_get, mock_get_by): get_by_return_value = [{\"name\": \"testname\",", "\"application/json\", \"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(uri,", "\"a name\", \"force\": \"yes\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1)", "300) self.resource_client = StubResourceSchema(self.connection) super(ResourceSchemaMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"get\") def test_get_schema_uri(self,", "self).setUp(self.resource_client) self.resource_helper = ResourceHelper(self.URI, self.connection, None) @mock.patch.object(ResourceHelper, \"do_put\") @mock.patch.object(Resource, \"ensure_resource_data\")", "self.URI) resource_client.update(dict_to_update, uri=uri) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task')", "test_create_report_should_raise_exception_when_not_task(self, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value =", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource): uri =", "'get_completed_task') def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() 
task_with_output['taskOutput'] =", "def test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value =", "was not raised\") def test_merge_resources(self): resource1 = {\"name\": \"resource1\", \"type\":", "@mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource): uri", "= self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client._helper.build_uri(None) except ValueError", "'typeV200' TYPE_V300 = 'typeV300' DEFAULT_VALUES = { '200': {'type': TYPE_V200},", "mock_get.return_value = {} self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three']) mock_get.assert_called_once_with(self.URI + \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\")", "return self._client.get(uri) class ResourceClientTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 = 'typeV200'", "self.fail() @mock.patch.object(connection, 'delete') def test_delete_with_dict_uri(self, mock_delete): resource = {\"uri\": \"uri\"}", "default_values = {} expected = {'name': 'resource1'} resource_client = ResourceClient(self.connection,", "= \"/rest/testuri/09USE7335NW3\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try:", "= {'name': 'resource1'} default_values = { '200': {\"type\": \"EnclosureGroupV200\"}, '300':", "@mock.patch.object(connection, 'post') def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\": \"a name\",", "expected_items = [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}, {\"id\": \"4\"},", "self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.data = 
{\"uri\": \"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers)", "result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID,", "} expected = {'name': 'resource1', \"type\": \"EnclosureGroupV300\"} resource_client = ResourceClient(self.connection,", "\"do_get\") def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get): self.resource_client.data = {\"uri\": \"/uri/test\"} mock_do_get.return_value =", "\"testname\"} mock_get_by.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper, \"do_get\") @mock.patch.object(Resource,", "uri=uri) self.assertEqual(self.response_body, response) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_custom_headers(self,", "\"7\"}]}] mock_get.side_effect = results result = self.resource_client.get_all() expected_items = [{'id':", "mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection,", "@mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value =", "'5'}, {'id': '6'}, {'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def", "mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"resource_name\": \"a name\", \"uri\":", "expected_calls) @mock.patch.object(connection, 'get') def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', 
'/rest/testuri?start=3&count=3',", "[{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(connection, 'get')", "{}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\")", "{'nextPageUri': None, 'members': None} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection,", "mock_post.return_value = {}, {} expected = {\"resource_name\": \"a name\", \"type\":", "subresource='/rest/testuri/5/sub/6', path='sub', uri='/rest/testuri/5/sub/6'), dict( resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8', path='sub', uri='/rest/testuri/7/sub/8'), dict( resource=None,", "self.connection, None) @mock.patch.object(ResourceHelper, \"do_put\") @mock.patch.object(Resource, \"ensure_resource_data\") def test_ensure_resource_should_call_once(self, mock_do_put, mock_ensure_resource):", "mock_get_by.return_value = [] actual_result = self.resource_client.ensure_resource_data(update_data=False) expected_result = None self.assertEqual(actual_result,", "{} self.resource_client.create(dict_to_create, timeout=-1) expected_uri = \"/rest/testuri\" mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None) @mock.patch.object(connection,", "methods\"\"\" URI = \"/rest/testuri\" class BaseTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200", "self.assertEqual(result, fake_response_body) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path", "= self.task, self.response_body mock_wait4task.return_value = self.task update_task = self.resource_client.update(dict_to_update, False)", 
"try: self.resource_client.create(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else:", "def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client._helper.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])", "mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_custom_headers(self, mock_put): dict_to_update =", "\"\"\"Stub class to test resource common methods\"\"\" URI = \"/rest/testuri\"", "'get_by') def test_get_by_name_with_result(self, mock_get_by): mock_get_by.return_value = [{\"name\": \"value\"}] response =", "= [] response = self.resource_client.get_by_name('Resource Name,') self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,')", "{\"id\": \"2\"}, {\"id\": \"3\"}]}, {\"nextPageUri\": uri_list[2], \"members\": [{\"id\": \"4\"}, {\"id\":", "= {\"uri\": \"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(connection, \"get\") def test_get_by_uri(self, mock_get): self.resource_client.get_by_uri(\"/rest/testuri\")", "\"yes\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1) expected_uri = \"/rest/testuri\"", "= 300 resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(", "mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def 
test_get_by_with_incorrect_result_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\":", "'put') def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body result =", "self.resource_client.update(dict_to_update, uri=uri) self.assertEqual(self.response_body, response) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def", "'get') def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results", "= {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch('/rest/testuri/123', 'operation', '/field',", "{'id': '6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}]}] mock_get.side_effect = results", "new_instance = self.resource_client.create({}) self.assertNotEqual(self.resource_client, new_instance) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "'new_name', -1) mock_wait4task.assert_called_once_with(self.task, mock.ANY) def test_delete_with_none(self): try: self.resource_client.delete(None) except ValueError", "URI for this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try: self.resource_client.get(uri) except", "test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file", "= {}, {} headers = {'Extra': 'extra'} self.connection._apiVersion = 300", "\"test\", \"type\": \"typeV300\", \"uri\": uri} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update)", "request_body, custom_headers={}) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v300(self, mock_patch): request_body = [{", "= 
self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor,", "mock_post.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client._merge_default_values() expected_dict =", "False mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result) def", "def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "resource common methods\"\"\" URI = \"/rest/testuri\" class BaseTest(unittest.TestCase): URI =", "custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource):", "\"get\") def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "uri} expected = {\"name\": \"test\", \"type\": \"typeV300\", \"uri\": uri} mock_put.return_value", "= io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open'))", "= ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post')", "self.resource_client.URI = \"/rest/enclosures\" 
self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(Resource,", "custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') def test_update_with_force(self, mock_put): dict_to_update = {\"name\":", "\"wait_for_task\") def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value = self.task, {} mock_wait4task.return_value", "test_delete_by_id_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task", "in e.args[0]) else: self.fail() def test_create_with_empty_dict(self): try: self.resource_client.create({}) except ValueError", "mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.delete('1',", "= {\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI,", "ResourcePatchMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourcePatch(self.connection)", "mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_different_resource_uri_should_fail(self,", "path=\"/sub/\", uri=\"/rest/testuri/13/sub\"), ] for option in options: uri = self.resource_client._helper.build_subresource_uri(option[\"resource\"],", "filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'], refresh=True, view='day') expected_uri = 
'/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\", "{\"name\": \"resource1\", \"type\": \"resource\"} resource2 = {\"name\": \"resource2\", \"port\": \"1\"}", "mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {}, {}", "dict_transformed = transform_list_to_dict(list=list) self.assertEqual(dict_transformed, {'5': True, 'four': True, 'one': True,", "name\", \"type\": \"anotherType\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI,", "test_create_uri(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {},", "@mock.patch.object(connection, 'delete') def test_delete_with_dict_uri(self, mock_delete): resource = {\"uri\": \"uri\"} mock_delete.return_value", "{\"resource_name\": \"a name\", \"type\": \"anotherType\"} self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI, expected, custom_headers=None) @mock.patch.object(connection,", "else: self.fail() def test_get_collection_with_none(self): try: self.resource_client.get_collection(None) except ValueError as e:", "{\"uri\": \"uri\"} mock_delete.return_value = {}, {} delete_result = self.resource_client.delete(resource) self.assertTrue(delete_result)", "= connection(self.host, 300) self.resource_client = ResourceClient(self.connection, self.URI) self.task = {\"task\":", "\\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection,", "else: self.fail() def test_create_with_empty_dict(self): try: self.resource_client.create({}) except ValueError as e:", "[ dict( resource='1', subresource='2', path='sub', uri='/rest/testuri/1/sub/2'), dict( resource='/rest/testuri/3', subresource='4', path='sub',", 
"{\"type\": \"EnclosureGroupV300\"} } expected = {'name': 'resource1', \"type\": \"EnclosureGroupV300\"} resource_client", "purposes, a nextPageUri is returned by OneView. \"\"\" uri_list =", "\"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]),", "@mock.patch.object(connection, \"get\") def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"]", "mock_put): mock_put.return_value = None, self.response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1)", "self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper, \"do_get\") @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_should_update_resource_data(self, mock_do_get, mock_get_by): get_by_return_value", "'?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri)", "custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_return_entity(self, mock_wait4task, mock_put): dict_to_update", "mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_default_api_version_300(self, mock_put,", "self.task self.resource_client.URI = \"/rest/enclosures\" self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None)", "actual_result = 
self.resource_client.ensure_resource_data(update_data=False) expected_result = None self.assertEqual(actual_result, expected_result) @mock.patch.object(connection, \"get\")", "mock_update.return_value = self.task, self.response_body mock_wait4task.return_value = self.task update_task = self.resource_client.update(dict_to_update,", "= [ dict( resource='1', subresource='2', path='sub', uri='/rest/testuri/1/sub/2'), dict( resource='/rest/testuri/3', subresource='4',", "= self.task, self.task mock_wait4task.return_value = response_body new_resource = self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource,", "\\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection,", "expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\":", "{\"resource_name\": \"a name\"} mock_post.return_value = {}, {} resource_client = ResourceClient(self.connection,", "\"/rest/testuri\" request_body = [{ \"op\": \"replace\", \"path\": \"/name\", \"value\": \"new_name\",", "test resource common methods\"\"\" URI = \"/rest/testuri\" class BaseTest(unittest.TestCase): URI", "not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(\"/rest/\") except exceptions.HPOneViewUnknownType as exception:", "mock_patch): dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value = {}, {}", "self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'put') def test_update_with_uri_called_once(self, mock_put): dict_to_update = {\"name\":", "mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, 
\"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "\"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value = self.task,", "\"value\"}) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value", "\"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY,", "test_get_utilization_with_multiple_filters(self, mock_get): self.resource_client.get_utilization( '09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'], refresh=True, view='day') expected_uri", "self.task = {\"task\": \"task\", \"taskState\": \"Finished\"} self.response_body = {\"body\": \"body\"}", "\"200\": {\"type\": TYPE_V200}, \"300\": {\"type\": TYPE_V300} } def setUp(self, resource_client=None):", "expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3',", "{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}, {\"id\": \"7\"}] self.assertSequenceEqual(result, expected_items)", "def test_build_uri_with_uri_should_work(self): input = \"/rest/testuri/09USE7335NW3\" expected_output = \"/rest/testuri/09USE7335NW3\" result =", "'MyFibreNetwork', uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with__invalid_uri(self, 
mock_get_all): try:", "entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_wait4task.assert_called_once_with(self.task, mock.ANY) class ResourceUtilizationMixinTest(BaseTest): def setUp(self):", "mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} collection =", "uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with__invalid_uri(self, mock_get_all): try: self.resource_client.get_by('name',", "mock_wait4task, mock_put): dict_to_update = { \"resource_name\": \"a name\", \"uri\": \"a_uri\",", "= self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_false_when_error(self, mock_open,", "methods\"\"\" class StubResource(Resource): \"\"\"Stub class to test resource common methods\"\"\"", "{\"name\": \"test\", \"uri\": uri, \"type\": \"typeV300\"} mock_put.return_value = None, self.response_body", "= None, self.response_body response = self.resource_client.update(dict_to_update, uri=uri) self.assertEqual(self.response_body, response) mock_put.assert_called_once_with(uri,", "\"3\"}] mock_get.return_value = { \"nextPageUri\": uri, \"members\": members, \"uri\": uri", "= {'name': 'resource1', \"type\": \"EnclosureGroupV300\"} resource_client = ResourceClient(self.connection, self.URI) result", "mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_api_version_200(self, mock_put): dict_to_update =", "def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "uri) self.assertEqual(result, 
fake_response_body) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream):", "\"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart):", "def test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource): request_body = [{ \"op\": \"replace\", \"path\":", "result = self.resource_client.get_all(count=15) expected_items = [{'id': '1'}, {'id': '2'}, {'id':", "= {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1)", "\"endDate=2016-05-31T03:29:42.361Z\"], refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\"", "test_ensure_resource_should_call_once(self, mock_do_put, mock_ensure_resource): self.resource_client.data = {\"uri\": \"/rest/test\"} self.resource_client.update(data={\"name\": \"test\"}) mock_do_put.assert_called_once()", "'/name', 'new_name', 60) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch')", "def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri = '/rest/testuri/'", "dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} expected_dict", "expected\"}] response = self.resource_client.get_by('name', 'exPEcted') self.assertEqual(response, [{\"name\": \"EXpected\"}]) mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri')", "dict_to_update) 
@mock.patch.object(Resource, \"get_by\") def test_get_by_name_with_result(self, mock_get_by): self.resource_client.get_by_name(\"Resource Name,\") mock_get_by.assert_called_once_with(\"name\", \"Resource", "@mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri)", "['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'},", "+ \"/12345?filter=name%3Dname\") @mock.patch.object(connection, 'get') def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {}", "response = self.resource_client.get_by_id(\"123\") self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_collection_uri(self, mock_get):", "= \"/rest/testuri/09USE7335NW3\" expected_output = \"/rest/testuri/09USE7335NW3\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result)", "mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value =", "result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'put') def", "exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def", "expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) class ResourceSchemaMixinTest(BaseTest): def setUp(self): self.connection =", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def 
test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource): mock_patch.return_value =", "mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task", "@mock.patch.object(connection, \"get\") def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"},", "'en_US'} @mock.patch.object(connection, 'get') def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK \\\"Test", "mock_put.return_value = None, self.response_body response = self.resource_client.update(dict_to_update, uri=uri) self.assertEqual(self.response_body, response)", "self.resource_client.create(dict_to_create, timeout=-1) expected_uri = \"/rest/testuri\" mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post')", "'7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, 'get')", "test_update_with_empty_dict(self): try: self.resource_client.update({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0])", "try: self.resource_client.get(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected", "self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'post')", "{\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch(", "'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy()", "dict_to_update, custom_headers=None) @mock.patch.object(connection, 
'put') def test_update_with_api_version_200(self, mock_put): dict_to_update = {\"name\":", "timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create", "self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_open_file(self,", "mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" request_body = [{ \"op\": \"replace\",", "False) self.assertEqual(self.task, update_task) mock_update.assert_called_once_with(\"a_uri\", dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task')", "def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])", "path=\"sub\", uri=\"/rest/testuri/3/sub/4\"), dict( resource=\"5\", subresource=\"/rest/testuri/5/sub/6\", path=\"sub\", uri=\"/rest/testuri/5/sub/6\"), dict( resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\",", "self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, []) @mock.patch.object(connection, 'post') def test_create_report_should_raise_exception_when_not_task(self, mock_post): task_with_output =", "Name,\") @mock.patch.object(connection, \"get\") def test_get_by_uri(self, mock_get): self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection, \"get\")", "{\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}]}] mock_get.side_effect = results result =", "= \"/rest/testuri\" dict_to_update = {\"name\": \"test\"} expected = 
{\"name\": \"test\",", "ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers) @mock.patch.object(connection, 'patch')", "def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\")", "\"value\": \"new_name\", }] mock_patch.return_value = {}, {} self.resource_client.patch(\"replace\", \"/name\", \"new_name\")", "uri = \"/rest/testuri?start=0&count=-1\" members = [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\":", "@mock.patch.object(Resource, \"get_by\") def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by): self.resource_client.data = {\"name\": \"testname\"} mock_get_by.return_value", "\"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_args(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True,", "response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'post')", "\"name\"} mock_post.return_value = self.task, self.task mock_wait4task.return_value = response_body new_resource =", "test_merge_resources(self): resource1 = {\"name\": \"resource1\", \"type\": \"resource\"} resource2 = {\"name\":", "'wb') mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_true_when_success(self, 
mock_open, mock_download_to_stream):", "[{'id': '1'}, {'id': '2'}, {'id': '3'}]}, {'nextPageUri': uri_list[2], 'members': [{'id':", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "self._client.get(uri) class ResourceClientTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 = 'typeV200' TYPE_V300", "mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_return_entity(self,", "'en_US'}) @mock.patch.object(connection, 'put') def test_update_with_force(self, mock_put): dict_to_update = {\"name\": \"test\"}", "uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_without_default_values(self, mock_put):", "except exceptions.HPOneViewException as exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0]) else: self.fail(\"Expected Exception was", "expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def", "self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri_and_query_string(self, mock_get):", "mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") def test_delete_should_return_true(self, mock_delete, mock_ensure_resource):", "{\"name\": \"test\", \"type\": \"typeV300\"} self.resource_client.data = {'uri': uri} expected =", "fake_response_body result = self.resource_client.upload(filepath, uri) self.assertEqual(result, 
fake_response_body) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open'))", "= \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri) @mock.patch.object(connection,", "request_body, custom_headers={}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource):", "results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri =", "self.resource_client.get_by_name(\"Resource Name,\") mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(Resource, \"get_by\") def test_get_by_name_without_result(self, mock_get_by):", "dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\":", "mock_wait4task.return_value = self.task self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'patch')", "mock_patch.return_value = {}, {} headers = {'Extra': 'extra'} self.connection._apiVersion =", "= \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try: self.resource_client.get(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0])", "mock_get_completed_task, mock_post): task_output = [ {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-24T15: 32:", "\"2\"}, {\"id\": \"3\"}]}, {\"nextPageUri\": uri_list[2], \"members\": [{\"id\": \"4\"}, {\"id\": \"5\"},", "= '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file = 
io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri, file_path)", "test_refresh(self, mock_do_get): updated_data = {\"resource_name\": \"updated name\"} mock_do_get.return_value = updated_data", "uri = \"/rest/testuri\" mock_put.return_value = None, self.response_body expected_dict = {\"name\":", "'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task,", "None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task')", "mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} collection = self.resource_helper.get_collection()", "call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "self.task mock_wait4task.return_value = self.task self.resource_client.URI = \"/rest/enclosures\" self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) mock_update.assert_called_once_with(", "self.assertEqual(self.resource_client.data, get_by_return_value[0]) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_without_data_update(self, mock_get_by): mock_get_by.return_value = []", "None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=3)", "result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, 'post_multipart_with_response_handling') 
@mock.patch.object(TaskMonitor, 'wait_for_task')", "def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch): dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value", "option['uri']) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client.build_subresource_uri(None, \"123456\", 'sub-path') except exceptions.HPOneViewValueError as", "'123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'})", "e: self.assertTrue(\"field\" in e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def test_get_with_uri_should_work(self,", "= { \"resource_name\": \"a name\", } created_resource = { \"resource_id\":", "uri=\"/path\", custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') def test_update_with_force(self,", "mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, response_body)", "self.fail() def test_get_collection_with_none(self): try: self.resource_client.get_collection(None) except ValueError as e: self.assertTrue(\"id\"", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get): self.resource_client.data = {\"uri\": \"/uri/test\"} mock_do_get.return_value = [] with", "expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'},", "test_delete_with_force(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value =", "\"type\": \"typeV300\", \"uri\": uri} mock_put.return_value = None, self.response_body 
self.resource_client.update(dict_to_update) self.assertEqual(self.response_body,", "response_body = {\"resource_name\": \"name\"} self.resource_client.URI = \"/rest/enclosures\" mock_put.return_value = self.task,", "self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self):", "mock_do_get): updated_data = {\"resource_name\": \"updated name\"} mock_do_get.return_value = updated_data self.resource_client.refresh()", "= \"Unrecognized URI for this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try:", "= [] mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\",", "60) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post): task_output", "uri = \"/rest/testuri\" dict_to_update = {\"resource_name\": \"a name\", \"uri\": uri}", "headers = {\"Content-Type\": \"application/json\", \"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri,", "mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_api_version_200(self, mock_post): dict_to_create =", "def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results =", "\"task\", \"taskState\": \"Finished\"} self.response_body = {\"body\": \"body\"} self.custom_headers = {'Accept-Language':", "'/name', 'value': 'new_name', }] mock_patch.return_value = {}, {} resource_client =", "{\"type\": \"EnclosureGroupV200\"}, '300': {\"type\": \"EnclosureGroupV300\"} } expected = 
{'name': 'resource1',", "self.resource_helper.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "mock_wait4task.return_value = self.task self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(Resource, \"ensure_resource_data\")", "resource_client = ResourceClient(self.connection, self.URI) resource_client.update(dict_to_update, uri=uri) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection,", "\"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock()", "custom_headers=headers) mock_patch.assert_called_once_with( '/rest/testuri/id', dict_info, custom_headers={'Extra': 'extra', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch')", "self.host = '127.0.0.1' self.connection = connection(self.host, 300) self.resource_client = ResourceClient(self.connection,", "= self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get):", "} result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def", "\"/rest/resource/test\" mock_put.return_value = None, self.response_body resource_client = ResourceClient(self.connection, self.URI) resource_client.update(dict_to_update,", "resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, 
custom_headers=None) def test_merge_api_default_values(self): resource = {'name':", "\"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri", "self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_should_not_override_resource_properties(self, mock_post):", "\"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") def test_delete_should_return_true(self,", "test_create_with_none(self): try: self.resource_client.create(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0])", "\"type\": \"EnclosureGroupV300\"} resource_client = ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource, default_values)", "{\"uri\": \"/rest/testuri\"} result = self.resource_client.delete() self.assertTrue(result) @mock.patch.object(connection, 'delete') def test_helper_delete_all_should_return_true(self,", "\"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls", "test_update_with_api_version_200(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value", "uri_list = [\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\":", "{}, {} self.connection._apiVersion = 200 self.resource_client._merge_default_values() expected_dict = {\"resource_name\": \"a", "{\"name\": \"not expected\"}] response = self.resource_client.get_by('name', 'exPEcted') self.assertEqual(response, [{\"name\": \"EXpected\"}])", 
"self.resource_client.update(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail()", "= self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'post') def test_create_uri(self,", "= StubResourcePatch(self.connection) super(ResourcePatchMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v200(self,", "'get') def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this case, the user", "subresource='12', path='/sub/', uri='/rest/testuri/11/sub/12'), dict( resource='/rest/testuri/13', subresource=None, path='/sub/', uri='/rest/testuri/13/sub'), ] for", "else: self.fail(\"Expected Exception was not raised\") def test_build_subresource_uri(self): options =", "self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'post') def test_create_with_zero_body_without_task(self, mock_post):", "self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name',", "expected = {\"name\": \"test\", \"type\": \"typeV300\", \"uri\": uri} mock_put.return_value =", "@mock.patch.object(connection, \"get\") def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"},", "@mock.patch.object(connection, 'put') def test_update_with_default_api_version_300(self, mock_put): dict_to_update = {\"name\": \"test\"} uri", "self.resource_client.create(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail()", "= \"'name'='OneViewSDK \\\"Test FC Network'\" sort = \"name:ascending\" query =", 
"mock_patch.return_value = {}, {} headers = {\"Extra\": \"extra\"} self.connection._apiVersion =", "\"/rest/testuri\" dict_to_update = {\"resource_name\": \"a name\", \"uri\": uri} mock_put.return_value =", "'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_return_entity(self, mock_wait4task, mock_post): dict_to_create", "{\"key\": \"value\"}]} self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(connection, 'get') def test_get_collection_with_filter(self,", "timeout=-1) self.assertEqual(result, dict_to_update) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once(self, mock_wait4task,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "delete_task = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_wait4task.assert_called_with(self.task, timeout=-1) self.assertEqual(self.task, delete_task) @mock.patch.object(connection,", "@mock.patch.object(connection, \"get\") def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK \\\"Test FC", "mock_get_by): self.resource_client.get_by_name(\"Resource Name,\") mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(Resource, \"get_by\") def test_get_by_name_without_result(self,", "mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_return_entity(self,", "{\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}]}] mock_get.side_effect = results", "'&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def 
test_get_utilization_by_id_with_defaults(self,", "mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file = io.StringIO()", "e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_create_with_empty_dict(self): try: self.resource_client.create({})", "\"\"\"Stub class to test resource schema methods\"\"\" class StubResource(Resource): \"\"\"Stub", "uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"] results = [{\"nextPageUri\": uri_list[1], \"members\":", "resource='/rest/testuri/11', subresource='12', path='/sub/', uri='/rest/testuri/11/sub/12'), dict( resource='/rest/testuri/13', subresource=None, path='/sub/', uri='/rest/testuri/13/sub'), ]", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri", "test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"resource_data\":", "self.resource_client._helper.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception", "{} headers = {\"Content-Type\": \"application/json\", \"Extra\": \"extra\"} self.connection._apiVersion = 300", "\"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, 'get') def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\": [{\"key\":", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body = {\"resource_name\": \"name\"}", "@mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_given_uri(self, 
mock_open, mock_download_to_stream): file_path = \"~/archive.log\"", "self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"], refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\", "\"/rest/testuri\" mock_put.return_value = None, self.response_body expected = {\"name\": \"test\", \"type\":", "\"do_get\") def test_refresh(self, mock_do_get): updated_data = {\"resource_name\": \"updated name\"} mock_do_get.return_value", "FakeResource(object): def __init__(self, con): self._connection = con self._client = ResourceClient(con,", "@mock.patch.object(connection, 'get') def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', ['name1=one',", "else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'get') def test_get_utilization_with_args(self,", "mock_get_completed_task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post):", "except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception was", "mock_post_multipart): fake_associated_resurce = mock.Mock() uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "@mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body =", "'post') def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "\"members\": [{\"id\": \"4\"}, {\"id\": \"5\"}, 
{\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\":", "specific language governing permissions and # limitations under the License.", "\"type\": \"type1\"}, {\"name\": \"resource2\", \"type\": \"type1\"} ] self.assertEqual(result_list, expected_list) def", "= '09USE7335NW35' expected_output = '/rest/testuri/09USE7335NW35' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result)", "path='sub', uri='/rest/testuri/3/sub/4'), dict( resource='5', subresource='/rest/testuri/5/sub/6', path='sub', uri='/rest/testuri/5/sub/6'), dict( resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8',", "expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\", "uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]}, {'nextPageUri': None,", "[{\"name\": \"EXpected\"}, {\"name\": \"not expected\"}] response = self.resource_client.get_by('name', 'exPEcted') self.assertEqual(response,", "\"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "dict_info, custom_headers=headers) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers(self,", "default_type = {\"type\": \"type1\"} resource1 = {\"name\": \"resource1\"} resource2 =", "test_get_by_with_incorrect_result_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"EXpected\"}, {\"name\": \"not expected\"}] response", "not raised\") def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for this", "custom_headers={\"Accept-Language\": \"en_US\"}) 
@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource):", "'patch') def test_patch_with_custom_headers_v200(self, mock_patch): mock_patch.return_value = {}, {} self.connection._apiVersion =", "mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1) expected_uri = \"/rest/testuri\" mock_post.assert_called_once_with(expected_uri,", "= self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_false_when_error(self, mock_open,", "fake_associated_resurce = mock.Mock() uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "LP # # Licensed under the Apache License, Version 2.0", "{\"resource_name\": \"name\"} mock_post.return_value = self.task, self.task mock_wait4task.return_value = response_body result", "test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"} uri", "refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\", "def setUp(self): super(ResourceClientTest, self).setUp() self.host = '127.0.0.1' self.connection = connection(self.host,", "def test_delete_with_force(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value", "\"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect =", "mock_get): uri_list = [\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1],", 
"self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( '/rest/testuri', {}, custom_headers=None)", "\"members\": None} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(ResourceHelper, \"do_get\") def", "path=\"sub\", uri=\"/rest/testuri/9/sub/10\"), dict( resource=\"/rest/testuri/11\", subresource=\"12\", path=\"/sub/\", uri=\"/rest/testuri/11/sub/12\"), dict( resource=\"/rest/testuri/13\", subresource=None,", "except exceptions.HPOneViewUnknownType as e: self.assertEqual('Unrecognized URI for this resource', e.args[0])", "test_patch_request_when_id_is_provided_v200(self, mock_patch): request_body = [{ 'op': 'replace', 'path': '/name', 'value':", "def test_create_with_empty_dict(self): try: self.resource_client.create({}) except ValueError as e: self.assertTrue(\"Resource\" in", "result = self.resource_helper.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\")", "# you may not use this file except in compliance", "returned by OneView. 
\"\"\" uri_list = ['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_force(self, mock_put, mock_laod_resource): dict_to_update = {\"name\":", "mock_delete.assert_called_once_with(self.URI + \"/1?force=True\", custom_headers=None) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_with_custom_headers(self,", "'post') def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "'two': True}) def test_extract_id_from_uri(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id = '3518be0e-17c1-4189-8f81-83f3724f6155'", "extracted_id) def test_extract_id_from_uri_with_extra_slash(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id,", "self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceSchema(self.connection) super(ResourceSchemaMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection,", "uri=\"/rest/testuri/3/sub/4\"), dict( resource=\"5\", subresource=\"/rest/testuri/5/sub/6\", path=\"sub\", uri=\"/rest/testuri/5/sub/6\"), dict( resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\", path=\"sub\",", "uri = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155') def test_extract_id_from_uri_unsupported(self):", "\"&query=name%20NE%20%27WrongName%27\" \\ \"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\": \"member\"}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def", "expected_resource) def test_merge_default_values(self): default_type = {\"type\": \"type1\"} 
resource1 = {\"name\":", "{}, {} headers = {\"Content-Type\": \"application/json\", \"Extra\": \"extra\"} self.connection._apiVersion =", "mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection,", "{\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict,", "ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri,", "self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource): uri", "['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"},", "input = \"09USE7335NW35\" expected_output = \"/rest/testuri/09USE7335NW35\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output,", "= self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor,", "uri_list[2], \"members\": [{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\": None,", "} result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def", "= self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, 
self.response_body) @mock.patch.object(connection, 'put') def test_update_with_uri_called_once(self,", "= None, self.response_body self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "custom_headers=None) @mock.patch.object(connection, 'post') def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\": \"a", "\"new_name\") mock_patch.assert_called_once_with(uri, request_body, custom_headers={}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v300(self,", "\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_patch(self,", "None, self.response_body filter = \"name='Exchange Server'\" result = self.resource_client.delete_all(filter=filter, force=True,", "\"get\") def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath", "custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_api_version_200(self, mock_post): dict_to_create = {\"resource_name\": \"a", "'name,owner,modified', scope_uris=scope_uris) uri = '{resource_uri}?start=1' \\ '&count=500' \\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\", "self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_custom_headers(self,", "test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = 
\"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value", "= \"/rest/testuri\" expected = {\"name\": \"test\", \"uri\": uri, \"type\": \"typeV300\"}", "StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once(self, mock_wait4task,", "self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "{'id': '6'}, {'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self,", "def test_helper_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body filter = \"name='Exchange", "'Resource Name,') @mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = []", "-*- ### # (C) Copyright [2019] Hewlett Packard Enterprise Development", "results result = self.resource_client.get_all() expected_items = [{'id': '1'}, {'id': '2'},", "{\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]),", "test_get_by_uri(self, mock_get): self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection, \"get\") def test_get_by_id_with_result(self, mock_get): self.resource_client.get_by_id(\"123\")", "response_body = {\"resource_name\": \"name\"} mock_post.return_value = self.task, self.task mock_wait4task.return_value =", "input = 
\"/rest/testuri/09USE7335NW3\" expected_output = \"/rest/testuri/09USE7335NW3\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output,", "headers = {\"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers)", "self.fail(\"Expected Exception was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\")", "\"/rest/fake/resource\") def get_fake(self, uri): return self._client.get(uri) class ResourceClientTest(unittest.TestCase): URI =", "custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_uri(self, mock_wait4task, mock_update): dict_to_update", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "self.task, self.response_body mock_wait4task.return_value = self.task update_task = self.resource_client.update(dict_to_update, False) self.assertEqual(self.task,", "None, self.response_body self.resource_client.update(dict_to_update, uri=\"/path\", custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection,", "{'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\"", "custom_headers=None) @mock.patch.object(connection, 'post') def test_create_uri_with_force(self, mock_post): dict_to_create = {\"resource_name\": \"a", "self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15',", "e: self.assertTrue(\"id\" in e.args[0]) else: 
self.fail() def test_create_with_none(self): try: self.resource_client.create(None)", "\"test\"}) mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once() def test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data = [] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data)", "\"Finished\"} self.response_body = {\"body\": \"body\"} self.custom_headers = {'Accept-Language': 'en_US'} @mock.patch.object(connection,", "] task_with_output = self.task.copy() task_with_output['taskOutput'] = task_output mock_post.return_value = self.task,", "test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} collection", "fake_response_body) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path =", "mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value =", "self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_default_api_version_300(self,", "\"09USE7335NW35\" expected_output = \"/rest/testuri/09USE7335NW35\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def", "mock_get.return_value = {\"nextPageUri\": None, \"members\": None} result = self.resource_client.get_all() self.assertEqual(result,", "self.resource_client.get_all( 1, 500, filter, query, sort, view, 'name,owner,modified', scope_uris=scope_uris) uri", "= [] mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output 
self.resource_client.create_report(\"/rest/path/create-report\")", "def test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" dict_info =", "RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method) class StubResourceFileHandler(ResourceFileHandlerMixin, Resource):", "@mock.patch.object(connection, \"get\") def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") class", "\"get\") def test_get_utilization_with_args(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True, view=\"day\") expected_uri", "\"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri)", "@mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri =", "\"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls = [call(uri_list[0]),", "\"get\") def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "file operations\"\"\" class StubResourceZeroBody(ResourceZeroBodyMixin, Resource): \"\"\"Stub class to test resoruce", "self.response_body mock_wait4task.return_value = self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\",", "subresource=\"/rest/testuri/7/sub/8\", path=\"sub\", 
uri=\"/rest/testuri/7/sub/8\"), dict( resource=None, subresource=\"/rest/testuri/9/sub/10\", path=\"sub\", uri=\"/rest/testuri/9/sub/10\"), dict( resource=\"/rest/testuri/11\",", "mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) def test_merge_api_default_values(self): resource = {'name': 'resource1'} default_values", "result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart(self,", "'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_return_entity(self, mock_wait4task, mock_put): dict_to_update = {", "except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail('Expected Exception was", "@mock.patch.object(connection, \"post\") def test_create_with_api_version_200(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "= ResourceClient(self.connection, self.URI) resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY,", "mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream):", "5] dict_transformed = transform_list_to_dict(list=list) self.assertEqual(dict_transformed, {'5': True, 'four': True, 'one':", "under the Apache License, Version 2.0 (the \"License\"); # you", "mock_wait4task.return_value = entity result = self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name',", "'/rest/testuri?start=0&count=-1' members = [{'id': '1'}, {'id': '2'}, {'id': '3'}] mock_get.return_value", "self.resource_client._merge_default_values() self.task = {\"task\": 
\"task\", \"taskState\": \"Finished\"} self.response_body = {\"body\":", "uri=uri) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_uri(self,", "expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_api_version_200(self, mock_post): dict_to_create = {\"resource_name\":", "transform_list_to_dict(list=list) self.assertEqual(dict_transformed, {'5': True, 'four': True, 'one': True, 'tree': 3,", "as e: self.assertTrue(\"id\" in e.args[0]) else: self.fail() def test_create_with_none(self): try:", "'7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]),", "test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task", "'replace', '/name', 'new_name', -1) mock_wait4task.assert_called_once_with(self.task, mock.ANY) def test_delete_with_none(self): try: self.resource_client.delete(None)", "test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value = self.task, {} mock_wait4task.return_value = self.task", "200 self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource,", "@mock.patch.object(connection, \"post\") def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "-1) mock_wait4task.assert_called_once_with(self.task, mock.ANY) def test_delete_with_none(self): try: self.resource_client.delete(None) except ValueError as", "'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def 
test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch):", "{} self.connection._apiVersion = 200 self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70)", "= None, self.response_body resource_client = ResourceClient(self.connection, self.URI) resource_client.update(dict_to_update, uri=uri) mock_put.assert_called_once_with(uri,", "in e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def test_get_with_uri_should_work(self, mock_get): mock_get.return_value", "class StubResourceSchema(ResourceSchemaMixin, Resource): \"\"\"Stub class to test resource schema methods\"\"\"", "body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def", "def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"] results =", "self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\", \"name=three\"]) mock_get.assert_called_once_with(self.URI + \"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, \"get\") def test_get_collection_should_return_list(self,", "test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response = self.resource_client.get_by_name('Resource Name,') self.assertIsNone(response)", "= results result = self.resource_client.get_all(count=15) expected_items = [{\"id\": \"1\"}, {\"id\":", "else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(\"/rest/\")", "mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\")", "'uri': 
uri } result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "@mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource): entity", "None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY, mock.ANY) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_uri(self, mock_wait4task,", "'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch): dict_info = {\"resource_name\":", "= None, self.response_body self.resource_client.update(dict_to_update, uri=uri, force=True) expected_uri = \"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri,", "uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = False mock_open.return_value = io.StringIO() result", "\"uri\": uri, \"type\": \"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) expected_uri", "'wait_for_task') def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value", "mock_get.return_value = { \"nextPageUri\": uri, \"members\": members, \"uri\": uri }", "test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {} self.resource_client.patch(\"operation\", \"/field\", \"value\",", "[{ 'op': 'replace', 'path': '/name', 'value': 
'new_name', }] mock_patch.return_value =", "'extra'} self.connection._apiVersion = 300 resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info,", "result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input = '/rest/testuri/09USE7335NW3'", "'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update): mock_update.return_value = self.task,", "self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, task_output) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task,", "= {'nextPageUri': None, 'members': None} result = self.resource_client.get_all() self.assertEqual(result, [])", "mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" dict_info = {\"resource_name\": \"a name\"}", "def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = []", "mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = True", "'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put): response_body = {\"resource_name\":", "\"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO()", "mock_get.assert_called_once_with(self.URI + \"?filter=name%3Dname\") @mock.patch.object(connection, \"get\") def test_get_collection_with_path(self, mock_get): mock_get.return_value =", "mock_ensure_resource): 
self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\"", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_merge_resources(self): resource1", "self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def", "= self.task, self.task mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\",", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers(self, mock_task, mock_patch,", "class to test resource schema methods\"\"\" class StubResource(Resource): \"\"\"Stub class", "= {\"resource_data\": \"resource_data\", \"uri\": \"a_uri\"} mock_update.return_value = self.task, self.response_body mock_wait4task.return_value", "\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {},", "not raised') @mock.patch.object(connection, 'get') def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1',", "\"resource_name\": \"a name\", } created_resource = { \"resource_id\": \"123\", \"resource_name\":", "expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_uri_with_defaults(self, mock_get): 
self.resource_client.get_utilization('/rest/testuri/09USE7335NW3')", "dict_to_create = [{\"resource_name\": \"a name\"}] mock_post.return_value = {}, {} resource_client", "= {}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with(uri,", "self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_without_default_values(self,", "self.connection._apiVersion = 200 self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with(", "path=\"/sub/\", uri=\"/rest/testuri/11/sub/12\"), dict( resource=\"/rest/testuri/13\", subresource=None, path=\"/sub/\", uri=\"/rest/testuri/13/sub\"), ] for option", "mock_get, mock_ensure_resource): self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"], refresh=True, view=\"day\") expected_uri =", "= [{\"name\": \"testname\", \"uri\": \"/rest/testuri\"}] self.resource_client.data = {\"name\": \"testname\"} mock_do_get.return_value", "test_update_should_not_override_resource_properties(self, mock_put): dict_to_update = {\"name\": \"test\", \"type\": \"anotherType\"} uri =", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource): request_body =", "timeout=-1) self.assertEqual(result, self.response_body) class ResourcePatchMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1',", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post): task_with_output =", 
"custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_custom_headers(self, mock_ensure_resource,", "\"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri =", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_build_subresource_uri(self): options", "@mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\"", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result = self.resource_helper.get_all( 1, 500,", "exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def", "result = self.resource_helper.get_all( 1, 500, filter, query, sort) uri =", "name\"}] mock_post.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create,", "'resource1'} default_values = { '200': {\"type\": \"EnclosureGroupV200\"}, '300': {\"type\": \"EnclosureGroupV300\"}", "\"put\") def test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"} uri", "\"taskState\": \"Finished\"} self.response_body = {\"body\": \"body\"} self.custom_headers = {'Accept-Language': 'en_US'}", "self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'put') def 
test_update_with_zero_body_without_task(self, mock_put):", "mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body() mock_post.assert_called_once_with( \"/rest/testuri\",", "mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{\"nextPageUri\": uri_list[1],", "test_get_collection_with_path(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI + \"/test\") @mock.patch.object(connection,", "mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v300(self,", "was not raised\") @mock.patch.object(connection, 'post') def test_create_when_the_resource_is_a_list(self, mock_post): dict_to_create =", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_without_default_values(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\"", "\"\"\" In this case, the user provides a maximum number", "= ResourceClient(self.connection, self.URI) resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with(", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, \"SPPgen9snap6.2015_0405.81.iso\")", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_force(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task,", 
"expected_items) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this case,", "self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\")", "self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"], option[\"path\"]) self.assertEqual(uri, option[\"uri\"]) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client._helper.build_subresource_uri(None, \"123456\",", "def test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method) class FakeResource(object): def __init__(self, con): self._connection", "= {\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI,", "result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_false_when_error(self,", "mock_post.return_value = self.task, self.task mock_wait4task.return_value = response_body new_resource = self.resource_client.create_with_zero_body(timeout=-1)", "= self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(ResourceHelper, \"do_get\") def test_refresh(self, mock_do_get): updated_data", "class StubResource(Resource): \"\"\"Stub class to test resource common methods\"\"\" URI", "\"/1?force=True\", custom_headers=None) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_with_custom_headers(self, mock_wait4task, mock_delete):", "= 
\"/rest/resource/test?force=True\" mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_api_version_200(self, mock_put):", "'/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_request_when_uri_is_provided(self, mock_patch): request_body", "is returned by OneView. \"\"\" uri_list = [\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"]", "mock.ANY, custom_headers={'Accept-Language': 'en_US', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "extract_id_from_uri(uri) self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155') def test_extract_id_from_uri_unsupported(self): # This example is not", "def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI,", "{\"id\": \"6\"}]}, {'nextPageUri': None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect", "mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_force(self,", "expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\":", "'6'}, {'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get):", "= \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" 
fake_resource = FakeResource(None) try: fake_resource.get_fake(uri) except exceptions.HPOneViewUnknownType as", "\"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResource(self.connection) super(ResourceTest,", "e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_update_with_empty_dict(self): try: self.resource_client.update({})", "'/rest/testuri?start=6&count=1'] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\": \"2\"},", "file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = True mock_open.return_value", "governing permissions and # limitations under the License. ### import", "test_update_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"resource_name\":", "path='sub', uri='/rest/testuri/5/sub/6'), dict( resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8', path='sub', uri='/rest/testuri/7/sub/8'), dict( resource=None, subresource='/rest/testuri/9/sub/10',", "mock_task, mock_patch): dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value = {},", "[{'name': 'expected'}, {'name': 'not expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def", "exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail('Expected Exception was not", "uri=\"/rest/testuri/1/sub/2\"), dict( resource=\"/rest/testuri/3\", subresource=\"4\", path=\"sub\", uri=\"/rest/testuri/3/sub/4\"), dict( resource=\"5\", subresource=\"/rest/testuri/5/sub/6\", path=\"sub\",", 
"test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration',", "mock_wait4task.return_value = response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body)", "def test_get_by_property_with_uri(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient, 'get_all')", "\"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() mock_wait4task.return_value =", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri", "test_get_by_property_with_uri(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient, 'get_all') def", "custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v300(self, mock_patch): mock_patch.return_value = {},", "\"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO() result = self.resource_client.download(uri,", "\"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection,", "was not raised\") 
def test_build_subresource_uri(self): options = [ dict( resource='1',", "e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI", "self.task self.resource_client.create_with_zero_body() mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "self.assertEqual(result, created_resource) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_create(self, mock_wait4task, mock_post):", "\"put\") def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body self.resource_client.URI =", "\"test\", \"type\": \"anotherType\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body", "= {\"name\": \"test\", \"type\": self.TYPE_V300} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict,", "mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY)", "mock_post.return_value = {}, {} self.connection._apiVersion = 200 expected_dict = {\"resource_name\":", "mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\": []} result = self.resource_client.get_all()", "to test resource schema methods\"\"\" class StubResource(Resource): \"\"\"Stub class to", "\"test\"} uri = \"/rest/testuri\" mock_put.return_value = 
None, self.response_body expected_dict =", "mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_id_with_defaults(self, mock_get): self.resource_client.get_utilization('09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization'", "'/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\":", "TYPE_V300 = 'typeV300' DEFAULT_VALUES = { '200': {'type': TYPE_V200}, '300':", "this resource', e.args[0]) else: self.fail() @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value", "self.assertEqual(response, [{\"name\": \"EXpected\"}]) mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all):", "response = self.resource_client.update(dict_to_update, uri=uri) self.assertEqual(self.response_body, response) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection,", "uri): return self._client.get(uri) class ResourceClientTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 =", "= self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor,", "ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources,", "mock_get.assert_called_once_with(self.URI + \"/12345?filter=name%3Dname\") 
@mock.patch.object(connection, 'get') def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value =", "= None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with(\"/rest/testuri\", mock.ANY, mock.ANY) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor,", "def test_create_when_the_resource_is_a_list(self, mock_post): dict_to_create = [{\"resource_name\": \"a name\"}] mock_post.return_value =", "= [ {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-24T15: 32: 50.889Z\"}, {\"type\": \"FCIssueResponseV2\",", "mock_do_get.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_raise_resource_not_found_exception_without_uri(self,", "= self.resource_helper.get_collection() self.assertEqual(len(collection), 2) def test_build_uri_with_id_should_work(self): input = \"09USE7335NW35\" expected_output", "mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result) class ResourceZeroBodyMixinTest(BaseTest):", "custom_headers={}) @mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v300(self, mock_patch): request_body = [{ 'op':", "@mock.patch.object(Resource, \"get_by\") def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response =", "def test_patch_request_when_uri_is_provided(self, mock_patch): request_body = [{ 'op': 'replace', 'path': '/name',", "StubResourceSchema(ResourceSchemaMixin, Resource): \"\"\"Stub class to test resource schema methods\"\"\" class", "\"task\", \"taskState\": \"Finished\"} self.response_body = {\"body\": \"body\"} self.custom_headers = {\"Accept-Language\":", "self.resource_client.data = {\"name\": \"testname\"} mock_get_by.return_value = [] with 
self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True)", "\"type\": \"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None)", "= [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id':", "'value': 'new_name', }] mock_patch.return_value = {}, {} resource_client = ResourceClient(self.connection,", "= {}, {} headers = {'Content-Type': 'application/json', 'Extra': 'extra'} self.connection._apiVersion", "{} try: self.resource_client.create_report(\"/rest/path/create-report\") except exceptions.HPOneViewException as exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0]) else:", "self.URI + \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri) mock_get.assert_called_once_with(uri) def test_get_with_uri_with_incompatible_url_shoud_fail(self): message = \"Unrecognized", "= \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO() result =", "= \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri", "mock_post.return_value = {}, {} expected_dict = {\"resource_name\": \"a name\", \"type\":", "self.resource_client.get_utilization('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "@mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value =", 
"mock_get.return_value = {'nextPageUri': None, 'members': None} result = self.resource_client.get_all() self.assertEqual(result,", "mock_post.return_value = self.task, {} mock_wait4task.return_value = created_resource result = self.resource_client.create(dict_to_create,", "custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_return_entity(self,", "None, \"members\": []} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, \"get\")", "test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value", "def test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"], refresh=True, view=\"day\")", "\"get\") def test_get_by_id_with_result(self, mock_get): self.resource_client.get_by_id(\"123\") mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_by_id_without_result(self,", "result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, \"put\") def", "mock_put, mock_laod_resource): dict_to_update = {\"name\": \"test\"} uri = \"/rest/testuri\" expected", "self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 
'wait_for_task')", "\"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_return_entity(self, mock_wait4task,", "self.task self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, self.resource_client.data) mock_update.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\")", "@mock.patch.object(connection, \"put\") def test_update_with_force(self, mock_put, mock_laod_resource): dict_to_update = {\"name\": \"test\"}", "not raised\") @mock.patch.object(connection, 'post') def test_create_when_the_resource_is_a_list(self, mock_post): dict_to_create = [{\"resource_name\":", "= {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection,", "def get_fake(self, uri): return self._client.get(uri) class ResourceClientTest(unittest.TestCase): URI = \"/rest/testuri\"", "'name=three']) mock_get.assert_called_once_with(self.URI + \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, 'get') def test_get_collection_should_return_list(self, mock_get): mock_get.return_value", "{'name': 'not expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with_uri(self, mock_get_all):", "@mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource): response_body", "@mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with__invalid_uri(self, mock_get_all): try: self.resource_client.get_by('name', 
'MyFibreNetwork', uri='/rest/other/5435534/sub') except", "body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( '/rest/testuri/id', dict_info, custom_headers={'Extra': 'extra', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection,", "\"name='Exchange Server'\" result = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(connection, 'delete')", "file_path) self.assertTrue(result) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path", "Apache License, Version 2.0 (the \"License\"); # you may not", "{'id': '5'}, {'id': '6'}, {'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get')", "self.resource_client.URI = \"/rest/enclosures\" result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, self.response_body)", "TYPE_V200}, '300': {'type': TYPE_V300} } def setUp(self): super(ResourceClientTest, self).setUp() self.host", "= self.task self.resource_client.delete('1', custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'}) def test_delete_dict_invalid_uri(self): dict_to_delete", "expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES)", "either express or implied. 
# See the License for the", "mock_get.assert_called_once_with(self.URI + \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, 'get') def test_get_collection_should_return_list(self, mock_get): mock_get.return_value =", "uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_should_not_override_resource_properties(self, mock_put):", "ValueError as e: self.assertTrue(\"id\" in e.args[0]) else: self.fail() def test_get_collection_with_none(self):", "self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream):", "'wait_for_task') def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value", "'3518be0e-17c1-4189-8f81-83f3724f6155') def test_extract_id_from_uri_unsupported(self): # This example is not supported yet", "{'id': '5'}, {'id': '6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}, {'id':", "self.resource_helper.get_all( 1, 500, filter, query, sort) uri = \"{resource_uri}?start=1\" \\", "timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_api_version_200(self, mock_post): dict_to_create", "= \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath,", "def test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value", "mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, 
\"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_without_default_values(self, mock_put,", "'wb') mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_true_when_success(self, mock_open,", "\"members\": [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}]}, {\"nextPageUri\": uri_list[2], \"members\":", "= None, self.response_body self.resource_client.update(dict_to_update) expected_uri = \"/rest/testuri\" mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None)", "custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor,", "custom_headers=None) def test_merge_api_default_values(self): resource = {'name': 'resource1'} default_values = {", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'post') def", "\"/rest/testuri\", request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_uri_is_provided(self,", "= mock.Mock() uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value =", "{}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch( '123a53cz', 'replace', '/name',", "= ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) def", "test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') 
except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0])", "self.resource_client.create({}) self.assertNotEqual(self.resource_client, new_instance) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_create(self, mock_wait4task,", "{\"name\": \"test\"} uri = \"/rest/testuri\" mock_put.return_value = None, self.response_body expected_dict", "[{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls =", "@mock.patch.object(connection, 'put') def test_update_with_custom_headers(self, mock_put): dict_to_update = {\"name\": \"test\"} mock_put.return_value", "True, 'tree': 3, 'two': True}) def test_extract_id_from_uri(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155'", "False) self.assertEqual(self.task, self.resource_client.data) mock_update.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "self.assertEqual(result, task_output) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post):", "mock_download_to_stream.assert_called_once_with(mock.ANY, uri) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path", "self.assertTrue(\"id\" in e.args[0]) else: self.fail() def test_get_collection_with_none(self): try: self.resource_client.get_collection(None) except", "uri } result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get')", "= [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\": \"2\"}, 
{\"id\": \"3\"}]},", "request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_uri_is_provided(self, mock_patch,", "\"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post')", "fake_resource.get_fake(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0]) else: self.fail(\"Expected Exception", "exceptions.HPOneViewException as exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0]) else: self.fail(\"Expected Exception was not", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.URI = \"/rest/enclosures\" self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1)", "mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "= \"/rest/testuri\" dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": uri} expected =", "ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() @mock.patch.object(connection, 'delete')", "= task_output mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output result", "= ['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members': [{'id':", "mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY) 
@mock.patch.object(connection, 'download_to_stream')", "result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input = \"/rest/testuri/09USE7335NW3\"", "self.task update_task = self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, update_task) mock_update.assert_called_once_with(\"a_uri\", dict_to_update, custom_headers=None)", "\"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO()", "hpOneView.resources.resource import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource,", "\"do_get\") @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_should_update_resource_data(self, mock_do_get, mock_get_by): get_by_return_value = [{\"name\":", "self.assertEqual(result, response_body) @mock.patch.object(connection, 'put') def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None,", "{\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-13T14: 10: 50.322Z\"} ] task_with_output = self.task.copy()", "self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method) class FakeResource(object): def __init__(self, con): self._connection = con", "= True mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertTrue(result)", "self).setUp() self.host = '127.0.0.1' self.connection = connection(self.host, 300) self.resource_client =", "@mock.patch.object(connection, 'put') def test_update_with_force(self, mock_put): dict_to_update = {\"name\": \"test\"} uri", "ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) 
@mock.patch.object(connection, 'post') def", "self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task')", "= \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1)", "self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this", "mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\",", "}] mock_patch.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.patch(", "300) self.resource_client = StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\")", "raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(\"/rest/\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI,", "{'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}] self.assertSequenceEqual(result, expected_items)", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post): task_output =", "uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with__invalid_uri(self, mock_get_all): try: self.resource_client.get_by('name', 
'MyFibreNetwork', uri='/rest/other/5435534/sub')", "test_get_utilization_by_uri_with_defaults(self, mock_get): self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) def test_get_utilization_with_empty(self): try:", "hpOneView.connection import connection from hpOneView import exceptions from hpOneView.resources.resource import", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY,", "\"patch\") def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {} self.connection._apiVersion", "mock from mock import call from tests.test_utils import mock_builtin from", "self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(ResourceHelper, \"do_get\") def test_refresh(self, mock_do_get): updated_data =", "[] mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output result =", "{\"body\": \"body\"} self.custom_headers = {\"Accept-Language\": \"en_US\"} class ResourceFileHandlerMixinTest(BaseTest): def setUp(self):", "mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch('/rest/testuri/123', 'operation', '/field',", "filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() mock_wait4task.return_value = fake_associated_resurce", "import mock from mock import call from tests.test_utils import mock_builtin", "'get') def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', ['name1=one', 'name2=two',", "self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( 
'/rest/testuri', {}, custom_headers=None) @mock.patch.object(connection,", "id = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(id, extracted_id) def test_extract_id_from_uri_with_extra_slash(self):", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "@mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task,", "'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update): mock_update.return_value = self.task,", "self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "{\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update,", "200 self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body,", "dict( resource=\"/rest/testuri/13\", subresource=None, path=\"/sub/\", uri=\"/rest/testuri/13/sub\"), ] for option in options:", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_by_id_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body", "\"resource_id\": \"123\", \"resource_name\": \"a name\", } mock_post.return_value = self.task, {}", "mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.delete('1', custom_headers=self.custom_headers)", "[{\"id\": \"7\"}]}] mock_get.side_effect = results result = self.resource_client.get_all() 
expected_items =", "+ \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri) mock_get.assert_called_once_with(uri) def test_get_with_uri_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI", "test_ensure_resource_without_data_update(self, mock_get_by): mock_get_by.return_value = [] actual_result = self.resource_client.ensure_resource_data(update_data=False) expected_result =", "\"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0])", "self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_without_default_values(self, mock_post):", "FC Network'\" sort = 'name:ascending' query = \"name NE 'WrongName'\"", "\\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\" \\ \"&refresh=true\" \\ \"&view=day\"", "\"a name\"}] mock_post.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI)", "resource_client = ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected)", "= {\"name\": \"resource2\", \"type\": \"resource\", \"port\": \"1\"} merged_resource = merge_resources(resource1,", "{} self.connection._apiVersion = 200 self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY,", "'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def", "\"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def 
test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value =", "\"3\"}]}, {\"nextPageUri\": uri_list[2], \"members\": [{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]},", "delete_task) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_by_id_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value", "test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(\"/rest/\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else:", "uri='/rest/testuri/11/sub/12'), dict( resource='/rest/testuri/13', subresource=None, path='/sub/', uri='/rest/testuri/13/sub'), ] for option in", "= ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( '/rest/testuri/id', dict_info, custom_headers={'Extra':", "self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() @mock.patch.object(connection, 'delete') def test_delete_with_dict_uri(self, mock_delete):", "{\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, \"get\") def", "\"typeV300\"} self.resource_client.data = {'uri': uri} expected = {\"name\": \"test\", \"type\":", "@mock.patch.object(connection, \"post\") def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "self.assertTrue(result) @mock.patch.object(connection, 'delete') def test_helper_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body", "= io.StringIO() result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\"))", 
"try: self.resource_client.get(None) except ValueError as e: self.assertTrue(\"id\" in e.args[0]) else:", "under the License. ### import io import unittest import mock", "'&query=name%20NE%20%27WrongName%27' \\ '&sort=name%3Aascending' \\ '&view=%22%7Bview-name%7D%22' \\ '&fields=name%2Cowner%2Cmodified' \\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member':", "mock_get_all): try: self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/other/5435534/sub') except exceptions.HPOneViewUnknownType as e: self.assertEqual('Unrecognized", "connection('127.0.0.1', 300) self.resource_client = StubResourcePatch(self.connection) super(ResourcePatchMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '') def test_extract_id_from_uri_passing_id(self): uri = '3518be0e-17c1-4189-8f81-83f3724f6155'", "TYPE_V200 = 'typeV200' TYPE_V300 = 'typeV300' DEFAULT_VALUES = { '200':", "= 'name:ascending' query = \"name NE 'WrongName'\" view = '\"{view-name}\"'", "StubResourcePatch(ResourcePatchMixin, Resource): \"\"\"Stub class to test resource patch operations\"\"\" class", "methods\"\"\" class StubResourceSchema(ResourceSchemaMixin, Resource): \"\"\"Stub class to test resource schema", "scope_uris=scope_uris) uri = '{resource_uri}?start=1' \\ '&count=500' \\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\ '&query=name%20NE%20%27WrongName%27'", "= None, self.response_body expected = {\"name\": \"test\", \"type\": \"anotherType\", \"uri\":", "@mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value =", "{\"name\": \"resource2\", \"type\": \"type1\"} ] self.assertEqual(result_list, expected_list) def 
test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod,", "self.resource_client.data) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_custom_headers(self,", "as e: self.assertTrue(\"field\" in e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def", "self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "self.resource_client.delete() self.assertTrue(result) @mock.patch.object(connection, 'delete') def test_helper_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None,", "setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest, self).setUp(self.resource_client)", "300 resource_client = ResourceClient(self.connection, self.URI) resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( '/rest/testuri/id',", "def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results =", "mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list,", "DEFAULT_VALUES = { '200': {'type': TYPE_V200}, '300': {'type': TYPE_V300} }", "\"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}, {\"id\": \"7\"}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection,", "\"test\", \"type\": self.TYPE_V200} 
self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection,", "self.connection._apiVersion = 200 self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY,", "mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_uri_with_defaults(self, mock_get): self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization'", "mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_custom_headers(self, mock_post): dict_to_create =", "self.resource_client.ensure_resource_data(update_data=False) expected_result = None self.assertEqual(actual_result, expected_result) @mock.patch.object(connection, \"get\") def test_get_all_called_once(self,", "raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID,", "mock_get): \"\"\" In this case, the user provides a maximum", "e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else: self.fail('Expected Exception was not raised') @mock.patch.object(connection,", "= '/rest/testuri?start=0&count=-1' members = [{'id': '1'}, {'id': '2'}, {'id': '3'}]", "{} self.resource_client.patch( '/rest/testuri/123a53cz', 'replace', '/name', 'new_name', 60) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body,", "= fake_associated_resurce result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, \"post_multipart_with_response_handling\")", "custom_headers=self.custom_headers) 
mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "None, self.response_body self.resource_client.update(dict_to_update) self.assertEqual(self.response_body, self.resource_client.data) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\")", "= self.resource_client.delete('1', force=True, timeout=-1) self.assertEqual(self.task, delete_task) mock_delete.assert_called_once_with(self.URI + \"/1?force=True\", custom_headers=None)", "self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by): self.resource_client.data = {\"name\": \"testname\"}", "{'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, 'get') def", "'/name', 'value': 'new_name', }] mock_patch.return_value = {}, {} self.resource_client.patch( '/rest/testuri/123a53cz',", "dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} resource_client", "mock_put.return_value = self.task, {} mock_wait4task.return_value = dict_to_update self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(self.resource_client.data,", "'/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'put') def test_update_with_uri_called_once(self, mock_put): dict_to_update", "= fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY) @mock.patch.object(connection, \"download_to_stream\")", "= self.task, self.task 
mock_wait4task.return_value = self.task self.resource_client.URI = \"/rest/enclosures\" self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\",", "uri_list = [\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\":", "self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_delete.assert_called_once_with(uri) @mock.patch.object(connection, 'delete') def test_delete_all_should_return_true(self, mock_delete): mock_delete.return_value", "= '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY)", "dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create,", "def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value", "= self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"})", "mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_default_api_version_300(self, mock_put): dict_to_update =", "self.task self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "mock_get.return_value = {} uri = self.URI + \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri) mock_get.assert_called_once_with(uri)", "file_path) 
self.assertTrue(result) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path", "mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_true_when_success(self,", "custom_headers=headers) mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers(self,", "custom_headers=None) def test_delete_with_empty_dict(self): try: self.resource_client.delete({}) except ValueError as e: self.assertTrue(\"Resource\"", "mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with__invalid_uri(self, mock_get_all): try: self.resource_client.get_by('name', 'MyFibreNetwork',", "self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\")", "self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15',", "= {\"name\": \"test\", \"type\": \"anotherType\"} uri = \"/rest/testuri\" mock_put.return_value =", "\"/12345\") @mock.patch.object(connection, 'get') def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345',", "{'Content-Type': 'application/json', 
'Extra': 'extra'} self.connection._apiVersion = 300 resource_client = ResourceClient(self.connection,", "True mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertTrue(result) @mock.patch.object(connection,", "mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1],", "'/rest/testuri', {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_and_custom_headers(self, mock_wait4task,", "as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() @mock.patch.object(connection, 'delete') def", "= ResourceClient(self.connection, self.URI) self.task = {\"task\": \"task\", \"taskState\": \"Finished\"} self.response_body", "= {\"members\": [{\"member\": \"member\"}]} result = self.resource_client.get_all( 1, 500, filter,", "'en_US', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_patch_return_entity(self, mock_wait4task,", "default_values = { '200': {\"type\": \"EnclosureGroupV200\"}, '300': {\"type\": \"EnclosureGroupV300\"} }", "uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body expected_dict = {\"name\":", "= self.resource_client.get_collection('12345') self.assertEqual(len(collection), 2) @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property(self, mock_get_all): self.resource_client.get_by('name',", "test_update_uri(self, mock_wait4task, mock_update): dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": \"a_uri\"} mock_update.return_value", "60) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v200(self,", 
"def test_update_with_empty_dict(self): try: self.resource_client.update({}) except ValueError as e: self.assertTrue(\"Resource\" in", "task_with_output self.resource_client.create_report(\"/rest/path/create-report\") mock_post.assert_called_once_with(\"/rest/path/create-report\", {}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_wait_task_completion(self,", "import io import unittest import mock from mock import call", "'get_all') def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"expected\"}, {\"name\": \"not", "50.889Z\"}, {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-13T14: 10: 50.322Z\"} ] task_with_output =", "mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") def test_create_should_return_new_resource_instance(self, mock_post): mock_post.return_value =", "'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy()", "\"typeV300\" DEFAULT_VALUES = { \"200\": {\"type\": TYPE_V200}, \"300\": {\"type\": TYPE_V300}", "'/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day' mock_get.assert_called_once_with(expected_uri)", "{'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]),", "self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers) @mock.patch.object(Resource,", "try: self.resource_client.get_by(None, None) except ValueError as e: self.assertTrue(\"field\" in e.args[0])", "{} new_instance = 
self.resource_client.create({}) self.assertNotEqual(self.resource_client, new_instance) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "[{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'},", "\"get_by\") def test_ensure_resource_should_update_resource_data(self, mock_do_get, mock_get_by): get_by_return_value = [{\"name\": \"testname\", \"uri\":", "'new_name', -1) self.assertEqual(result, entity) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers_with_content_type(self,", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message", "\"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\": \"member\"}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_defaults(self, mock_get):", "= \"'name'='OneViewSDK \\\"Test FC Network'\" sort = 'name:ascending' query =", "mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock() uri = \"/rest/testuri/\" filepath =", "mock_post): dict_to_create = {\"resource_name\": \"a name\", \"force\": \"yes\"} mock_post.return_value =", "None, \"members\": [{\"id\": \"7\"}]}] mock_get.side_effect = results result = self.resource_client.get_all(count=15)", "was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client._helper.build_uri('') except ValueError as", "test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task,", "'123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={}) @mock.patch.object(connection,", "{} resource_client = ResourceClient(self.connection, self.URI) 
resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers)", "\"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls", "test_patch_request_when_id_is_provided_v300(self, mock_patch): request_body = [{ 'op': 'replace', 'path': '/name', 'value':", "'wait_for_task') def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value", "resource='5', subresource='/rest/testuri/5/sub/6', path='sub', uri='/rest/testuri/5/sub/6'), dict( resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8', path='sub', uri='/rest/testuri/7/sub/8'), dict(", "expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource):", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body() mock_post.assert_called_once_with( \"/rest/testuri\", {},", "filter, query, sort) uri = \"{resource_uri}?start=1\" \\ \"&count=500\" \\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\"", "Copyright [2019] Hewlett Packard Enterprise Development LP # # Licensed", "use this file except in compliance with the License. 
#", "def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceFileHandler(self.connection) super(ResourceFileHandlerMixinTest,", "= None, mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor,", "default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_without_default_values(self, mock_post): dict_to_create", "uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body resource_client = ResourceClient(self.connection,", "\"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_and_custom_headers(self, mock_wait4task,", "{\"id\": \"7\"}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\"", "32: 50.889Z\"}, {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-13T14: 10: 50.322Z\"} ] task_with_output", "\"2015-03-24T15: 32: 50.889Z\"}, {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-13T14: 10: 50.322Z\"} ]", "mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(connection, \"get\") def test_get_by_uri(self, mock_get): self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri')", "members = [{'id': '1'}, {'id': '2'}, {'id': '3'}] mock_get.return_value =", "'put') def test_update_with_default_api_version_300(self, mock_put): dict_to_update = {\"name\": \"test\"} uri =", "mock_wait4task, mock_delete): mock_delete.return_value = self.task, self.response_body 
mock_wait4task.return_value = self.task filter", "def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'", "@mock.patch.object(connection, \"get\") def test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"],", "uri_list[2], \"members\": [{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {'nextPageUri': None,", "self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) def test_merge_api_default_values(self): resource =", "\"/rest/resource/test\" mock_put.return_value = None, self.response_body response = self.resource_client.update(dict_to_update, uri=uri) self.assertEqual(self.response_body,", "uri} expected = {\"resource_data\": \"resource_data\", \"uri\": uri, \"type\": \"typeV300\"} mock_update.return_value", "def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results =", "mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') def test_update_with_force(self, mock_put): dict_to_update", "= {} uri = self.URI + \"/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" self.resource_client.get(uri) mock_get.assert_called_once_with(uri) def", "def test_extract_id_from_uri(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id =", "= self.resource_client._helper.build_subresource_uri(option[\"resource\"], option[\"subresource\"], option[\"path\"]) 
self.assertEqual(uri, option[\"uri\"]) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client._helper.build_subresource_uri(None,", "mock.ANY, mock.ANY) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_wait_for_task_when_response_is_task(self,", "@mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource): mock_patch.return_value = {}, {}", "transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method) class StubResourceFileHandler(ResourceFileHandlerMixin, Resource): \"\"\"Stub class", "test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value = self.task, self.task mock_wait4task.return_value =", "def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath =", "self.connection = connection('127.0.0.1', 300) self.resource_client = StubResource(self.connection) super(ResourceTest, self).setUp(self.resource_client) self.resource_helper", "io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result) class ResourceZeroBodyMixinTest(BaseTest): def setUp(self):", "\"2\"}, {\"id\": \"3\"}, {\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}, {\"id\":", "self.response_body self.resource_client.data = {\"uri\": \"/rest/testuri\"} result = self.resource_client.delete() self.assertTrue(result) @mock.patch.object(connection,", "mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI +", "mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"name\": \"test\"} expected =", 
"mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.data = {\"uri\":", "def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"name\":", "mock_get_by): mock_get_by.return_value = [] response = self.resource_client.get_by_name(\"Resource Name,\") self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\",", "'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v200(self, mock_patch): mock_patch.return_value = {}, {}", "\"test\", \"type\": \"typeV300\"} self.resource_client.data = {'uri': uri} expected = {\"name\":", "self.TYPE_V300} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def", "mock_put.return_value = None, self.response_body expected = {\"name\": \"test\", \"type\": \"anotherType\",", "mock_get): self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z', refresh=True, view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\", "try: self.resource_client.get_utilization('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected", "collection = self.resource_client.get_collection('12345') self.assertEqual(len(collection), 2) @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property(self, mock_get_all):", "fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\"))", "\\ '&refresh=true' \\ '&view=day' 
mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_with_multiple_filters(self, mock_get):", "test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update): mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task", "\"get\") def test_get_by_id_without_result(self, mock_get): mock_get.return_value = [] response = self.resource_client.get_by_id(\"123\")", "\"get_completed_task\") def test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\" dict_info", "class to test resource patch operations\"\"\" class StubResourceUtilization(ResourceUtilizationMixin, Resource): \"\"\"Stub", "for this resource', e.args[0]) else: self.fail() @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task')", "resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try: self.resource_client.get(uri) except exceptions.HPOneViewUnknownType as exception:", "'put') def test_update_with_api_version_200(self, mock_put): dict_to_update = {\"name\": \"test\"} uri =", "mock_post.assert_called_once_with(\"/rest/path/create-report\", {}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post):", "'get_all') def test_get_by_property(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all')", "= \"name='Exchange Server'\" result = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(connection,", "exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'get') def", "dict( resource='5', 
subresource='/rest/testuri/5/sub/6', path='sub', uri='/rest/testuri/5/sub/6'), dict( resource='/rest/testuri/7', subresource='/rest/testuri/7/sub/8', path='sub', uri='/rest/testuri/7/sub/8'),", "'get') def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results", "= False mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result)", "@mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put): response_body =", "was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri(\"/rest/\") except exceptions.HPOneViewUnknownType as", "mock_post_multipart.return_value = self.task, mock.Mock() mock_wait4task.return_value = fake_associated_resurce result = self.resource_client.upload(filepath,", "200 expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V200} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)", "custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, \"post\") def", "self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, \"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, 
mock_post_multipart):", "mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file =", "# limitations under the License. ### import io import unittest", "@mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource): uri = \"/rest/testuri\"", "a nextPageUri is returned by OneView. \"\"\" uri_list = ['/rest/testuri?start=0&count=3',", "@mock.patch.object(connection, 'post') def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_custom_headers(self, mock_post): dict_to_create = {\"resource_name\":", "self.resource_client.create({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail()", "in compliance with the License. # You may obtain a", "mock.ANY, custom_headers={\"Accept-Language\": \"en_US\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor,", "None, 'members': [{'id': '7'}]}] mock_get.side_effect = results result = self.resource_client.get_all()", "@mock.patch.object(ResourceHelper, \"do_get\") def test_refresh(self, mock_do_get): updated_data = {\"resource_name\": \"updated name\"}", "\"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') except", "def test_get_by_id_uri(self, mock_get): self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(ResourceClient, 'get_by') def", 
"\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource):", "self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor,", "software # distributed under the License is distributed on an", "self.resource_client.update(dict_to_update, uri=\"/path\", custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') def", "= {\"uri\": \"uri\"} mock_delete.return_value = {}, {} delete_result = self.resource_client.delete(resource)", "self.fail(\"Expected Exception was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client.build_uri('') except", "options: uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path']) self.assertEqual(uri, option['uri']) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self):", "exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection,", "{\"body\": \"body\"} self.custom_headers = {'Accept-Language': 'en_US'} @mock.patch.object(connection, 'get') def test_get_all_called_once(self,", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None)", "uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}]}, {\"nextPageUri\": uri_list[2],", "mock_wait4task.return_value = 
dict_to_update self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(self.resource_client.data, dict_to_update) @mock.patch.object(Resource, \"get_by\") def", "@mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update): mock_update.return_value =", "test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI + \"?filter=name%3Dname\") @mock.patch.object(connection,", "@mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch): dict_info =", "\\ '&view=day' mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_with_multiple_filters(self, mock_get): self.resource_client.get_utilization( '09USE7335NW3',", "self.task mock_wait4task.return_value = response_body new_resource = self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(connection,", "self.resource_client = StubResource(self.connection) super(ResourceTest, self).setUp(self.resource_client) self.resource_helper = ResourceHelper(self.URI, self.connection, None)", "= merge_default_values([resource1, resource2], default_type) expected_list = [ {\"name\": \"resource1\", \"type\":", "dict( resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\", path=\"sub\", uri=\"/rest/testuri/7/sub/8\"), dict( resource=None, subresource=\"/rest/testuri/9/sub/10\", path=\"sub\", uri=\"/rest/testuri/9/sub/10\"),", "self.task, self.response_body mock_wait4task.return_value = self.task filter = \"name='Exchange Server'\" uri", "{'name': 'resource1'} default_values = {} expected = {'name': 'resource1'} resource_client", "= 
\"/rest/enclosures\" result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, self.response_body) class", "result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "= \"name='Exchange Server'\" uri = \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\" self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_delete.assert_called_once_with(uri)", "resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\", path=\"sub\", uri=\"/rest/testuri/7/sub/8\"), dict( resource=None, subresource=\"/rest/testuri/9/sub/10\", path=\"sub\", uri=\"/rest/testuri/9/sub/10\"), dict(", "@mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_uri(self, mock_wait4task, mock_update): dict_to_update =", "self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "import mock_builtin from hpOneView.connection import connection from hpOneView import exceptions", "test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock() uri = \"/rest/testuri/\" filepath", "= connection('127.0.0.1', 300) self.resource_client = StubResourceSchema(self.connection) super(ResourceSchemaMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"get\")", "\"post\") def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body new_resource =", "'09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'], refresh=True, 
view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\", "= \"name NE 'WrongName'\" view = '\"{view-name}\"' scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'", "\"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri =", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\":", "expected_output = '/rest/testuri/09USE7335NW3' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self):", "\"FCIssueResponseV2\", \"created\": \"2015-03-13T14: 10: 50.322Z\"} ] task_with_output = self.task.copy() task_with_output['taskOutput']", "\"name2=two\", \"name=three\"]) mock_get.assert_called_once_with(self.URI + \"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, \"get\") def test_get_collection_should_return_list(self, mock_get):", "not raised\") @mock.patch.object(connection, 'get') def test_get_utilization_with_args(self, mock_get): self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z',", "self.response_body mock_wait4task.return_value = self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY,", "\"value\": \"new_name\", }] mock_patch.return_value = {}, {} self.connection._apiVersion = 200", "mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1)", "300) self.resource_client = StubResourcePatch(self.connection) 
super(ResourcePatchMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\")", "expected_calls) @mock.patch.object(connection, \"get\") def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\",", "uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri, force=True)", "uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with_uri(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\",", "def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {'nextPageUri': None, 'members': []} result", "'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri =", "self.resource_client.get_all(count=15) expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, \"get\")", "mock_ensure_resource): mock_delete.return_value = None, self.response_body self.resource_client.data = {\"uri\": \"/rest/testuri\"} result", "self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"})", "uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path']) self.assertEqual(uri, option['uri']) def 
test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try:", "\"get\") def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except exceptions.HPOneViewUnknownType as e:", "timeout=-1) self.assertEqual(self.task, delete_task) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_by_id_called_once(self, mock_wait4task,", "@mock.patch.object(connection, \"post\") def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\": \"a name\",", "self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") class ResourceTest(BaseTest): def setUp(self): self.connection =", "mock_get.side_effect = results result = self.resource_client.get_all(count=15) expected_items = [{'id': '1'},", "mock import call from tests.test_utils import mock_builtin from hpOneView.connection import", "resource2 = {\"name\": \"resource2\", \"port\": \"1\"} expected_resource = {\"name\": \"resource2\",", "@mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\"", "results result = self.resource_client.get_all(count=15) expected_items = [{'id': '1'}, {'id': '2'},", "result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, task_output) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def", "test_get_all_with_custom_uri(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def", "Exception was not raised\") 
@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_do_post_request(self,", "{}, {} self.connection._apiVersion = 200 self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers)", "self.response_body = {\"body\": \"body\"} self.custom_headers = {\"Accept-Language\": \"en_US\"} class ResourceFileHandlerMixinTest(BaseTest):", "def test_delete_should_return_true(self, mock_delete, mock_ensure_resource): mock_delete.return_value = None, self.response_body self.resource_client.data =", "custom_headers={'Accept-Language': 'en_US'}) def test_delete_dict_invalid_uri(self): dict_to_delete = {\"task\": \"task\", \"uri\": \"\"}", "= task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, task_output) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "self.resource_client.create_with_zero_body(custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "\"task\", \"uri\": \"\"} try: self.resource_client.delete(dict_to_delete, False, -1) except exceptions.HPOneViewUnknownType as", "self.task, {} mock_get_completed_task.return_value = task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, [])", "def test_get_utilization_with_args(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True, view=\"day\") expected_uri =", "custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_uri(self, 
mock_wait4task,", "[{'id': '1'}, {'id': '2'}, {'id': '3'}] mock_get.return_value = { 'nextPageUri':", "as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_get_by_with_name_none(self): try:", "test resource patch operations\"\"\" class StubResourceUtilization(ResourceUtilizationMixin, Resource): \"\"\"Stub class to", "'&count=500' \\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\ '&query=name%20NE%20%27WrongName%27' \\ '&sort=name%3Aascending' \\ '&view=%22%7Bview-name%7D%22' \\", "\"port\": \"1\"} expected_resource = {\"name\": \"resource2\", \"type\": \"resource\", \"port\": \"1\"}", "= \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri", "def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri) class", "self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input = \"/rest/testuri/09USE7335NW3\" expected_output = \"/rest/testuri/09USE7335NW3\"", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\",", "class to test resource common methods\"\"\" URI = \"/rest/testuri\" class", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\"", "'resource1'} 
resource_client = ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result,", "@mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_patch_return_entity(self, mock_wait4task, mock_patch): entity =", "= {'Accept-Language': 'en_US'} @mock.patch.object(connection, 'get') def test_get_all_called_once(self, mock_get): filter =", "def test_get_by_property_with__invalid_uri(self, mock_get_all): try: self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/other/5435534/sub') except exceptions.HPOneViewUnknownType as", "test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None, self.response_body new_resource = self.resource_client.create_with_zero_body() self.assertNotEqual(new_resource,", "e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_update_with_none(self): try: self.resource_client.update(None)", "test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock() uri = '/rest/testuri/'", "mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream):", "mock.ANY) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_wait_for_task_when_response_is_task(self, mock_get,", "mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\")", "setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResource(self.connection) super(ResourceTest, 
self).setUp(self.resource_client)", "\"value\"}]} collection = self.resource_helper.get_collection() self.assertEqual(len(collection), 2) def test_build_uri_with_id_should_work(self): input =", "'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]}, {'nextPageUri': uri_list[2], 'members':", "'not expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with_uri(self, mock_get_all): self.resource_client.get_by('name',", "'127.0.0.1' self.connection = connection(self.host, 300) self.resource_client = ResourceClient(self.connection, self.URI) self.task", "= transform_list_to_dict(list=list) self.assertEqual(dict_transformed, {'5': True, 'four': True, 'one': True, 'tree':", "mock_update): dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": \"a_uri\"} mock_update.return_value = self.task,", "self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'put') def test_update_with_uri_called_once(self, mock_put):", "timeout=60) mock_get_completed_task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task,", "= task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, []) @mock.patch.object(connection, 'post') def", "self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open,", "self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155') def test_extract_id_from_uri_unsupported(self): # 
This example is not supported", "result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client.build_uri(None) except", "def test_update_without_default_values(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"name\":", "result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client._helper.build_uri(None) except", "with the License. # You may obtain a copy of", "'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0])", "{ \"resource_name\": \"a name\", \"uri\": \"a_uri\", } mock_put.return_value = self.task,", "self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_get_by_with_name_none(self): try: self.resource_client.get_by(None, None)", "mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'put') def test_update_with_force(self, mock_put): dict_to_update =", "uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock()", "@mock.patch.object(connection, 'get') def test_get_utilization_with_multiple_filters(self, mock_get): self.resource_client.get_utilization( '09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'],", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_return_entity(self, mock_wait4task, mock_post): dict_to_create = { \"resource_name\":", "= {'name': 'resource1'} resource_client = ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource,", "mock.Mock() self.resource_client.upload(filepath, uri) 
mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self,", "def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'", "= 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers) @mock.patch.object(Resource, \"ensure_resource_data\")", "result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "uri} mock_put.return_value = self.task, {} mock_wait4task.return_value = dict_to_update self.resource_client.update(dict_to_update, timeout=-1)", "[{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\":", "mock_wait4task.return_value = self.task update_task = self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, update_task) mock_update.assert_called_once_with(\"a_uri\",", "self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3') except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected", "\"post\") def test_create_with_api_version_200(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "mock_delete): mock_delete.return_value = None, self.response_body filter = \"name='Exchange Server'\" result", "@mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, 
mock_post): task_with_output =", "mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(connection, 'get') def test_get_collection_uri(self, mock_get): mock_get.return_value =", "= self.resource_client.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "fake_associated_resurce) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body", "is returned by OneView. \"\"\" uri_list = ['/rest/testuri?start=0&count=3', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3']", "mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1],", "[]} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_no_members(self,", "\"uri\": uri } result = self.resource_client.get_all() self.assertSequenceEqual(result, members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "expected_list) def test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method) class FakeResource(object): def __init__(self, con):", "e.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, \"get\") def", "\"/rest/testuri\"} self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "def test_update_return_entity(self, 
mock_wait4task, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update =", "view=\"day\") expected_uri = \"/rest/testuri/utilization\" \\ \"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z\" \\ \"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z\" \\ \"&fields=AmbientTemperature%2CAveragePower%2CPeakPower\"", "mock_post.return_value = None, self.response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result,", "@mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\"", "\"get\") def test_get_by_uri(self, mock_get): self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection, \"get\") def test_get_by_id_with_result(self,", "expected_uri = \"/rest/testuri\" mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_api_version_200(self,", "expected_resource = {\"name\": \"resource2\", \"type\": \"resource\", \"port\": \"1\"} merged_resource =", "\"5\"}, {\"id\": \"6\"}, {\"id\": \"7\"}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, \"get\") def", "mock_do_put, mock_ensure_resource): self.resource_client.data = {\"uri\": \"/rest/test\"} self.resource_client.update(data={\"name\": \"test\"}) mock_do_put.assert_called_once() mock_ensure_resource.assert_called_once()", "'7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list =", "self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client.build_uri(None) except ValueError as 
exception:", "@mock.patch.object(connection, 'get') def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {} self.resource_client.get_collection('12345', 'name=name')", "uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file = io.StringIO() mock_open.return_value = fake_file self.resource_client.download(uri,", "name\"} mock_patch.return_value = {}, {} headers = {\"Content-Type\": \"application/json\", \"Extra\":", "\"EXpected\"}]) mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all): mock_get_all.return_value =", "= self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_post.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", {}, custom_headers=None) @mock.patch.object(connection, 'post')", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update,", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_custom_headers(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task,", "\"name\"} mock_post.return_value = self.task, self.task mock_wait4task.return_value = response_body result =", "self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, update_task) mock_update.assert_called_once_with(\"a_uri\", dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor,", "express or implied. 
# See the License for the specific", "\"type\": \"anotherType\", \"uri\": uri} self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\")", "'post') def test_create_report_should_raise_exception_when_not_task(self, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = []", "except in compliance with the License. # You may obtain", "timeout=-1) expected_uri = \"/rest/testuri\" mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def", "self.assertEqual(result, []) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {'nextPageUri':", "extracted_id = extract_id_from_uri(uri) self.assertEqual(id, extracted_id) def test_extract_id_from_uri_with_extra_slash(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/'", "\"/name\", \"new_name\") mock_patch.assert_called_once_with( \"/rest/testuri\", request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "\"post\") def test_create_should_return_new_resource_instance(self, mock_post): mock_post.return_value = {}, {} new_instance =", "= self.DEFAULT_VALUES self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client._merge_default_values() self.task = {\"task\":", "mock_get.side_effect = results self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list,", "self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, \"put\") def test_update_with_zero_body_without_task(self, mock_put):", 
"\"do_put\") @mock.patch.object(Resource, \"ensure_resource_data\") def test_ensure_resource_should_call_once(self, mock_do_put, mock_ensure_resource): self.resource_client.data = {\"uri\":", "\"/name\", \"value\": \"new_name\", }] mock_patch.return_value = {}, {} self.connection._apiVersion =", "self.resource_client.get_by('connection.name', 'expected') self.assertEqual(response, [{'name': 'expected'}, {'name': 'not expected'}]) mock_get_all.assert_called_once_with(filter=\"\\\"connection.name='expected'\\\"\", uri='/rest/testuri')", "self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "'resource1', \"type\": \"EnclosureGroupV300\"} resource_client = ResourceClient(self.connection, self.URI) result = resource_client.merge_default_values(resource,", "mock_wait4task.return_value = self.task filter = \"name='Exchange Server'\" delete_task = self.resource_client.delete_all(filter=filter,", "@mock.patch.object(connection, 'get') def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/'", "def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value =", "mock_ensure_resource): response_body = {\"resource_name\": \"name\"} self.resource_client.URI = \"/rest/enclosures\" mock_put.return_value =", "@mock.patch.object(connection, \"get\") def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\"", "= \"09USE7335NW35\" expected_output = \"/rest/testuri/09USE7335NW35\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result)", "'123a53cz', 'replace', '/name', 'new_name', -1) 
self.assertEqual(result, entity) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor,", "in e.args[0]) else: self.fail() def test_get_collection_with_none(self): try: self.resource_client.get_collection(None) except ValueError", "'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v300(self,", "self.fail() @mock.patch.object(connection, 'get') def test_get_with_uri_should_work(self, mock_get): mock_get.return_value = {} uri", "@mock.patch.object(connection, 'get') def test_get_by_id_uri(self, mock_get): self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(ResourceClient,", "= [] actual_result = self.resource_client.ensure_resource_data(update_data=False) expected_result = None self.assertEqual(actual_result, expected_result)", "self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( uri, dict_info, custom_headers={\"Extra\": \"extra\", \"Content-Type\": \"application/json-patch+json\"})", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource):", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY,", "dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": uri} expected = {\"resource_data\": \"resource_data\",", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "'path': '/name', 'value': 'new_name', }] mock_patch.return_value = {}, {} self.resource_client.patch(", "self.resource_client.create_with_zero_body() 
mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor,", "self.assertTrue(\"id\" in e.args[0]) else: self.fail() def test_create_with_none(self): try: self.resource_client.create(None) except", "@mock.patch.object(connection, 'get') def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"},", "test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client.build_uri('') except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else:", "test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task", "@mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task,", "\"a name\", \"type\": \"anotherType\"} self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI, expected, custom_headers=None) @mock.patch.object(connection, \"post\")", "task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value = None, {}", "True, 'one': True, 'tree': 3, 'two': True}) def test_extract_id_from_uri(self): uri", "'/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={}) @mock.patch.object(connection, 'patch') def", "uri='/rest/testuri/1/sub/2'), dict( 
resource='/rest/testuri/3', subresource='4', path='sub', uri='/rest/testuri/3/sub/4'), dict( resource='5', subresource='/rest/testuri/5/sub/6', path='sub',", "import exceptions from hpOneView.resources.resource import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin,", "@mock.patch.object(connection, 'patch') def test_patch_request_when_uri_is_provided(self, mock_patch): request_body = [{ 'op': 'replace',", "self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body(uri=\"/rest/testuri\", custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\":", "Exception was not raised\") def test_build_subresource_uri(self): options = [ dict(", "self.fail() def test_get_with_none(self): try: self.resource_client.get(None) except ValueError as e: self.assertTrue(\"id\"", "scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result =", "'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=15) expected_calls", "as exception: self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0]) else: self.fail(\"Expected Exception was not raised\")", "\\ '&view=%22%7Bview-name%7D%22' \\ '&fields=name%2Cowner%2Cmodified' \\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member': 'member'}], result) mock_get.assert_called_once_with(uri)", "self.assertEqual(result, response_body) @mock.patch.object(connection, 'post') def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value = None,", "'post') def test_create_with_api_version_200(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "mock_get): mock_get.return_value = [] response = 
self.resource_client.get_by_id(\"123\") self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection,", "= results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri", "uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body response = self.resource_client.update(dict_to_update,", "get_by_return_value self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data, get_by_return_value[0]) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_without_data_update(self, mock_get_by): mock_get_by.return_value", "def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results =", "{} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\", timeout=60) mock_get_completed_task.assert_called_once_with(self.task, 60) @mock.patch.object(connection, 'post')", "custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path =", "\"get\") def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = \"/rest/testuri?start=0&count=-1\" members = [{\"id\":", "self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_open_file(self, mock_open,", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "option[\"uri\"]) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client._helper.build_subresource_uri(None, \"123456\", \"sub-path\") except exceptions.HPOneViewValueError as", "\"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.connection._apiVersion =", "def test_update_return_entity(self, mock_wait4task, mock_put): dict_to_update = { \"resource_name\": \"a name\",", "{'5': True, 'four': True, 'one': True, 'tree': 3, 'two': True})", "\"nextPageUri\": uri, \"members\": members, \"uri\": uri } result = self.resource_client.get_all()", "'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers(self, mock_task, mock_patch): dict_info = {\"resource_name\":", "filepath, 'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\"", "mock_do_get.return_value = get_by_return_value self.resource_client.ensure_resource_data(update_data=True) self.assertEqual(self.resource_client.data, get_by_return_value[0]) @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_without_data_update(self,", "dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_without_default_values(self, mock_put): dict_to_update = {\"name\":", "@mock.patch.object(connection, \"patch\") def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource): request_body = [{ \"op\":", "= {\"name\": \"testname\"} mock_get_by.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper,", "ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, 
Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED,", "{\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)", "'3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(id, extracted_id) def test_extract_id_from_uri_with_extra_slash(self): uri =", "self.resource_client.build_subresource_uri(None, \"123456\", 'sub-path') except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else:", "\\ \"&query=name%20NE%20%27WrongName%27\" \\ \"&sort=name%3Aascending\".format(resource_uri=self.URI) self.assertEqual([{\"member\": \"member\"}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\")", "custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource): dict_to_update", "supported yet uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/otherthing' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, 'otherthing')", "super(ResourcePatchMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource):", "import unittest import mock from mock import call from tests.test_utils", "self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceUtilization(self.connection) super(ResourceUtilizationMixinTest, self).setUp(self.resource_client) @mock.patch.object(Resource,", "expected_result = None self.assertEqual(actual_result, expected_result) @mock.patch.object(connection, \"get\") 
def test_get_all_called_once(self, mock_get):", "the License. ### import io import unittest import mock from", "expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI,", "self.task mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result,", "resource='/rest/testuri/13', subresource=None, path='/sub/', uri='/rest/testuri/13/sub'), ] for option in options: uri", "for pagination purposes, a nextPageUri is returned by OneView. \"\"\"", "{\"resource_name\": \"name\"} self.resource_client.URI = \"/rest/enclosures\" mock_put.return_value = self.task, self.task mock_wait4task.return_value", "= '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\ '&view=day'", "def test_create_uri_with_force(self, mock_post): dict_to_create = {\"resource_name\": \"a name\", \"force\": \"yes\"}", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task", "def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = \"/rest/testuri?start=0&count=-1\" members = [{\"id\": \"1\"},", "URI for this resource', e.args[0]) else: self.fail() @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor,", "self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def", "mock_delete): resource = {\"uri\": \"uri\"} mock_delete.return_value = {}, {} delete_result", "= None, self.response_body filter = 
\"name='Exchange Server'\" result = self.resource_helper.delete_all(filter=filter,", "mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = \"/rest/testuri?start=0&count=-1\" members", "'&view=%22%7Bview-name%7D%22' \\ '&fields=name%2Cowner%2Cmodified' \\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member': 'member'}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update): mock_update.return_value", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {\"nextPageUri\": None,", "\"test\", \"type\": \"anotherType\"} uri = \"/rest/testuri\" mock_put.return_value = None, self.response_body", "= '\"{view-name}\"' scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value = {\"members\": [{\"member\": \"member\"}]}", "uri = \"/rest/testuri\" request_body = [{ \"op\": \"replace\", \"path\": \"/name\",", "results self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection,", "self.custom_headers = {'Accept-Language': 'en_US'} @mock.patch.object(connection, 'get') def test_get_all_called_once(self, mock_get): filter", "'/rest/testuri/09USE7335NW3' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client.build_uri(None)", "self.fail(\"Expected Exception 
was not raised\") def test_build_uri_with_incomplete_uri_should_raise_exception(self): try: self.resource_client.build_uri('/rest/') except", "= None, self.response_body expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300} self.resource_client.update(dict_to_update,", "\"path\": \"/name\", \"value\": \"new_name\", }] mock_patch.return_value = {}, {} self.connection._apiVersion", "@mock.patch(mock_builtin(\"open\")) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri =", "mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") self.assertEqual(self.resource_client.data, entity) @mock.patch.object(Resource, \"ensure_resource_data\")", "self.task self.resource_client.create({\"test\": \"test\"}, timeout=60) mock_wait4task.assert_called_once_with(self.task, 60) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"delete\")", "self.response_body self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor,", "resource2) self.assertEqual(merged_resource, expected_resource) def test_merge_default_values(self): default_type = {\"type\": \"type1\"} resource1", "expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) def test_get_utilization_with_empty(self): try: self.resource_client.get_utilization('') except ValueError", "self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'post')", "= [] mock_post.return_value = None, {} try: self.resource_client.create_report(\"/rest/path/create-report\") except exceptions.HPOneViewException", "response_body) @mock.patch.object(connection, 'put') def 
test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body", "import connection from hpOneView import exceptions from hpOneView.resources.resource import (ResourceClient,", "in e.args[0]) else: self.fail() def test_update_with_none(self): try: self.resource_client.update(None) except ValueError", "{}, {} resource_client = ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create,", "[] mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\", timeout=60)", "= self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( '/rest/testuri', {},", "= '{resource_uri}?start=1' \\ '&count=500' \\ '&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \\ '&query=name%20NE%20%27WrongName%27' \\ '&sort=name%3Aascending'", "{\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-24T15: 32: 50.889Z\"}, {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-13T14:", "mock_post.return_value = None, {} try: self.resource_client.create_report(\"/rest/path/create-report\") except exceptions.HPOneViewException as exception:", "self.connection = connection(self.host, 300) self.resource_client = ResourceClient(self.connection, self.URI) self.task =", "\"6\"}]}, {'nextPageUri': None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}] mock_get.side_effect =", "= {}, {} self.resource_client.create(dict_to_create, timeout=-1) expected_uri = \"/rest/testuri\" mock_post.assert_called_once_with(expected_uri, dict_to_create,", "created_resource) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value", "mock_get_by): 
mock_get_by.return_value = [] actual_result = self.resource_client.ensure_resource_data(update_data=False) expected_result = None", "test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value =", "call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list =", "mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)", "mock_wait4task.return_value = self.task self.resource_client.delete('1', custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'}) def test_delete_dict_invalid_uri(self):", "'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_patch_return_entity(self, mock_wait4task, mock_patch):", "= {}, {} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post')", "@mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete): mock_delete.return_value =", "self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_args(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\",", "fake_response_body) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def 
test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path =", "self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\")", "'/name', 'new_name', -1) self.assertEqual(result, entity) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'get_completed_task') def", "mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):", "# (C) Copyright [2019] Hewlett Packard Enterprise Development LP #", "result = self.resource_client.delete() self.assertTrue(result) @mock.patch.object(connection, 'delete') def test_helper_delete_all_should_return_true(self, mock_delete): mock_delete.return_value", "{\"resource_data\": \"resource_data\", \"uri\": \"a_uri\"} mock_update.return_value = self.task, self.response_body mock_wait4task.return_value =", "test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by): self.resource_client.data = {\"name\": \"testname\"} mock_get_by.return_value = [] with", "result = self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(result, dict_to_update) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task')", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") def 
test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\":", "= self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( '/rest/testuri', {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "@mock.patch.object(connection, 'patch') def test_patch_request_when_id_is_provided_v200(self, mock_patch): request_body = [{ 'op': 'replace',", "@mock.patch.object(ResourceHelper, \"do_get\") def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get): self.resource_client.data = {\"uri\": \"/uri/test\"} mock_do_get.return_value", "maximum number of results to be returned but for pagination", "= resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart(self, mock_post_multipart):", "= { '200': {\"type\": \"EnclosureGroupV200\"}, '300': {\"type\": \"EnclosureGroupV300\"} } expected", "test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response = self.resource_client.get_by_name(\"Resource Name,\") self.assertIsNone(response)", "def test_build_uri_with_id_should_work(self): input = \"09USE7335NW35\" expected_output = \"/rest/testuri/09USE7335NW35\" result =", "self.response_body expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)", "def test_delete_with_custom_headers(self, mock_ensure_resource, mock_delete, mock_wait4task): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value", "filter, query, sort, view, 'name,owner,modified', scope_uris=scope_uris) uri = '{resource_uri}?start=1' \\", "uri = \"/rest/testuri\" dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": uri} expected", 
"\"/rest/testuri/09USE7335NW35\" result = self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input =", "@mock.patch.object(connection, \"put\") def test_update_with_custom_headers(self, mock_put, mock_ensure_resource): dict_to_update = {\"name\": \"test\"}", "as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_create_with_empty_dict(self): try:", "\"a name\", \"uri\": uri} mock_put.return_value = self.task, {} mock_wait4task.return_value =", "ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception was not", "= {\"name\": \"test\", \"uri\": uri, \"type\": \"typeV300\"} mock_put.return_value = None,", "True}) def test_extract_id_from_uri(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id", "{\"id\": \"2\"}, {\"id\": \"3\"}, {\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"},", "mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(connection, 'get') def test_get_collection_with_filter(self, mock_get): mock_get.return_value =", "def test_get_by_id_without_result(self, mock_get): mock_get.return_value = [] response = self.resource_client.get_by_id(\"123\") self.assertIsNone(response)", "{\"id\": \"8\"}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]),", "= self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, []) @mock.patch.object(connection, 'post') def test_create_report_should_raise_exception_when_not_task(self, mock_post): task_with_output", "update_task) mock_update.assert_called_once_with(\"a_uri\", dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_return_entity(self,", "= {\"name\": \"resource2\"} 
result_list = merge_default_values([resource1, resource2], default_type) expected_list =", "= [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper, \"do_get\") @mock.patch.object(Resource, \"get_by\") def", "= results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri", "= self.resource_client.delete_all(filter=filter, force=True, timeout=-1) mock_wait4task.assert_called_with(self.task, timeout=-1) self.assertEqual(self.task, delete_task) @mock.patch.object(connection, 'delete')", "{} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") def", "test_merge_api_default_values(self): resource = {'name': 'resource1'} default_values = { '200': {\"type\":", "= None, self.response_body expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300, \"uri\":", "\"type\": \"typeV300\"} self.resource_client.data = {'uri': uri} expected = {\"name\": \"test\",", "mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers)", "= [] response = self.resource_client.get_by_name(\"Resource Name,\") self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\")", "= {\"resource_name\": \"name\"} mock_put.return_value = self.task, self.task mock_wait4task.return_value = response_body", "\"wait_for_task\") def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body = {\"resource_name\": \"name\"} mock_post.return_value", "@mock.patch.object(TaskMonitor, 
\"wait_for_task\") def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath", "'\"{view-name}\"' scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result", "= 200 expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create,", "'get') def test_get_utilization_with_args(self, mock_get): self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z', refresh=True, view='day') expected_uri", "test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body = {\"resource_name\": \"name\"} mock_post.return_value = self.task,", "self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task')", "TYPE_V200}, \"300\": {\"type\": TYPE_V300} } def setUp(self, resource_client=None): self.resource_client =", "else: self.fail(\"Expected Exception was not raised\") @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task')", "test_get_utilization_with_args(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization(fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=\"startDate=2016-05-30T03:29:42.361Z\", refresh=True, view=\"day\") expected_uri = \"/rest/testuri/utilization\"", "mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_default_api_version_300(self, mock_post): dict_to_create =", "to test resoruce zero body methods\"\"\" class 
StubResourcePatch(ResourcePatchMixin, Resource): \"\"\"Stub", "Server'\" result = self.resource_helper.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "self.assertIsNone(response) mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\":", "Server'\" result = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor,", "'value': 'new_name', }] mock_patch.return_value = {}, {} self.connection._apiVersion = 200", "e.args[0]) else: self.fail() def test_get_by_with_name_none(self): try: self.resource_client.get_by(None, None) except ValueError", "test_get_with_uri_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for this resource\" uri =", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update): mock_update.return_value = self.task, self.task", "uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]}, {'nextPageUri': uri_list[2],", "\"a name\", \"type\": \"anotherType\"} mock_post.return_value = {}, {} expected =", "-1) self.assertEqual(result, created_resource) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_create(self, mock_wait4task,", "mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value = self.task,", "resource=None, subresource='/rest/testuri/9/sub/10', path='sub', uri='/rest/testuri/9/sub/10'), dict( resource='/rest/testuri/11', subresource='12', path='/sub/', uri='/rest/testuri/11/sub/12'), dict(", "'/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file = io.StringIO() 
mock_open.return_value = fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path,", "'4'}, {'id': '5'}, {'id': '6'}]}, {'nextPageUri': None, 'members': [{'id': '7'}]}]", "\"anotherType\", \"uri\": uri} self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "'get') def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {'nextPageUri': None, 'members': []}", "mock_wait4task.return_value = self.task delete_task = self.resource_client.delete('1', force=True, timeout=-1) self.assertEqual(self.task, delete_task)", "'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection,", "\"/name\", \"new_name\") mock_patch.assert_called_once_with(uri, request_body, custom_headers={}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def", "self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart):", "\"name:ascending\" query = \"name NE 'WrongName'\" mock_get.return_value = {\"members\": [{\"member\":", "mock_patch.assert_called_once_with(uri, request_body, custom_headers={}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_id_is_provided_v300(self, mock_patch,", "self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_wait4task.assert_called_once_with(self.task, mock.ANY) class 
ResourceUtilizationMixinTest(BaseTest): def setUp(self): self.connection", "@mock.patch.object(connection, 'get') def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): fake_associated_resurce = mock.Mock()", "expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_should_not_override_resource_properties(self, mock_post): dict_to_create = {\"resource_name\":", "test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value", "result = resource_client.merge_default_values(resource, default_values) self.assertEqual(result, expected) def test_should_not_merge_when_default_values_not_defined(self): resource =", "test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") @mock.patch.object(connection, 'get') def test_get_by_id_uri(self,", "def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body = {\"resource_name\": \"name\"} mock_post.return_value =", "= [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},", "@mock.patch.object(connection, 'post') def test_create_report_should_raise_exception_when_not_task(self, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] =", "@mock.patch(mock_builtin(\"open\")) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri =", "expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300} self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri,", "fields='AmbientTemperature,AveragePower,PeakPower', filter=['startDate=2016-05-30T03:29:42.361Z', 'endDate=2016-05-31T03:29:42.361Z'], refresh=True, 
view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z'", "{}, {} self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_patch.assert_called_once_with( \"/rest/testuri\", request_body, custom_headers={\"Content-Type\": \"application/json-patch+json\"})", "updated_data = {\"resource_name\": \"updated name\"} mock_do_get.return_value = updated_data self.resource_client.refresh() self.assertEqual(self.resource_client.data,", "mock_put.return_value = self.task, {} mock_wait4task.return_value = dict_to_update result = self.resource_client.update(dict_to_update,", "\"/rest/testuri\" TYPE_V200 = \"typeV200\" TYPE_V300 = \"typeV300\" DEFAULT_VALUES = {", "\"a name\", \"uri\": \"a_uri\", } mock_put.return_value = self.task, {} mock_wait4task.return_value", "mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\", \"name=three\"]) mock_get.assert_called_once_with(self.URI + \"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\")", "self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value =", "= \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri,", "= {\"resource_name\": \"a name\"} mock_patch.return_value = {}, {} headers =", "1, 500, filter, query, sort, view, 'name,owner,modified', scope_uris=scope_uris) uri =", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource): dict_to_update =", "expected\"}] response = self.resource_client.get_by('connection.name', 'expected') self.assertEqual(response, [{'name': 'expected'}, 
{'name': 'not", "not raised\") def test_build_uri_with_id_should_work(self): input = '09USE7335NW35' expected_output = '/rest/testuri/09USE7335NW35'", "def test_patch_request_when_id_is_provided_v200(self, mock_patch): request_body = [{ 'op': 'replace', 'path': '/name',", "= None, mock.Mock() self.resource_client.upload(filepath, uri) mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso') @mock.patch.object(connection, 'post_multipart_with_response_handling')", "@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource): mock_patch.return_value =", "\"wait_for_task\") def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\" filepath =", "permissions and # limitations under the License. ### import io", "def test_get_by_uri(self, mock_get): self.resource_client.get_by_uri(\"/rest/testuri\") mock_get.assert_called_once_with('/rest/testuri') @mock.patch.object(connection, \"get\") def test_get_by_id_with_result(self, mock_get):", "[] mock_post.return_value = None, {} try: self.resource_client.create_report(\"/rest/path/create-report\") except exceptions.HPOneViewException as", "'/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(id, extracted_id) def", "self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception was not raised\") def test_merge_resources(self):", "custom_headers={\"Extra\": \"extra\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "dict_to_update = {\"resource_name\": \"a name\", \"uri\": uri} mock_put.return_value = 
self.task,", "uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value')", "= \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri, force=True) expected_uri", "uri = \"/rest/testuri\" dict_info = {\"resource_name\": \"a name\"} mock_patch.return_value =", "= {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} collection = self.resource_helper.get_collection() self.assertEqual(len(collection),", "test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_client.get_collection('12345')", "self.task, self.response_body mock_wait4task.return_value = self.task delete_task = self.resource_client.delete('1', force=True, timeout=-1)", "methods\"\"\" class StubResourcePatch(ResourcePatchMixin, Resource): \"\"\"Stub class to test resource patch", "subresource=\"4\", path=\"sub\", uri=\"/rest/testuri/3/sub/4\"), dict( resource=\"5\", subresource=\"/rest/testuri/5/sub/6\", path=\"sub\", uri=\"/rest/testuri/5/sub/6\"), dict( resource=\"/rest/testuri/7\",", "@mock.patch.object(connection, \"post\") def test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "+ \"/12345\") @mock.patch.object(connection, 'get') def test_get_collection_with_filter(self, mock_get): mock_get.return_value = {}", "e: self.assertEqual('Unrecognized URI for this resource', e.args[0]) else: self.fail() @mock.patch.object(connection,", "'/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath, uri)", "self.resource_client.get_all(count=15) expected_items = [{'id': '1'}, 
{'id': '2'}, {'id': '3'}, {'id':", "False mock_open.return_value = io.StringIO() result = self.resource_client.download(uri, file_path) self.assertFalse(result) class", "class to test resource utilization methods\"\"\" class StubResourceSchema(ResourceSchemaMixin, Resource): \"\"\"Stub", "{\"name\": \"resource2\", \"type\": \"resource\", \"port\": \"1\"} merged_resource = merge_resources(resource1, resource2)", "test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\", \"name=three\"]) mock_get.assert_called_once_with(self.URI +", "path=\"sub\", uri=\"/rest/testuri/7/sub/8\"), dict( resource=None, subresource=\"/rest/testuri/9/sub/10\", path=\"sub\", uri=\"/rest/testuri/9/sub/10\"), dict( resource=\"/rest/testuri/11\", subresource=\"12\",", "dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None,", "self.assertNotEqual(self.resource_client, new_instance) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_create(self, mock_wait4task, mock_post):", "= self.resource_client.get_by_name(\"Resource Name,\") self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(connection, \"get\") def", "self.resource_client.get_by_name('Resource Name,') self.assertEqual(response, {\"name\": \"value\"}) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(ResourceClient, 'get_by')", "test_create_should_return_new_resource_instance(self, mock_post): mock_post.return_value = {}, {} new_instance = self.resource_client.create({}) self.assertNotEqual(self.resource_client,", "mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=\"/path\", custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, 
custom_headers={'Accept-Language':", "test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\": []} result =", "self.resource_client.URI = \"/rest/enclosures\" mock_put.return_value = self.task, self.task mock_wait4task.return_value = response_body", "result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, []) @mock.patch.object(connection, 'post') def test_create_report_should_raise_exception_when_not_task(self, mock_post):", "= self.resource_helper.get_all( 1, 500, filter, query, sort) uri = \"{resource_uri}?start=1\"", "Name,\") @mock.patch.object(Resource, \"get_by\") def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response", "StubResource(self.connection) super(ResourceTest, self).setUp(self.resource_client) self.resource_helper = ResourceHelper(self.URI, self.connection, None) @mock.patch.object(ResourceHelper, \"do_put\")", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task", "uri) mock_post_multipart.assert_called_once_with(uri, filepath, \"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath", "test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri':", "@mock.patch.object(connection, 'get') def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this case, the", "\"/name\", \"new_name\") self.assertEqual(self.resource_client.data, entity) @mock.patch.object(Resource, 
\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\")", "'3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}] self.assertSequenceEqual(result,", "custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "from hpOneView.resources.resource import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin,", "\"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\": None, \"members\": [{\"id\": \"7\"}, {\"id\": \"8\"}]}]", "self.resource_client.update(dict_to_update, custom_headers=self.custom_headers) mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\")", "= '/rest/testuri/09USE7335NW3' expected_output = '/rest/testuri/09USE7335NW3' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result)", "= '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '') def test_extract_id_from_uri_passing_id(self): uri", "{\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} collection = self.resource_helper.get_collection() self.assertEqual(len(collection), 2)", "\"/test\") @mock.patch.object(connection, \"get\") def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=[\"name1=one\",", "'new_name', }] mock_patch.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI)", "mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity result = self.resource_client.patch(", "\"7\"}, {\"id\": 
\"8\"}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection, \"get\")", "test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {'nextPageUri': None, 'members': None} result =", "@mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource): uri", "def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value =", "[{\"key\": \"value\"}, {\"key\": \"value\"}]} collection = self.resource_helper.get_collection() self.assertEqual(len(collection), 2) def", "= self.task self.resource_client.URI = \"/rest/enclosures\" self.resource_client.update_with_zero_body(\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None,", "\"get\") def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization() expected_uri = \"/rest/testuri/utilization\" mock_get.assert_called_once_with(expected_uri)", "body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with( uri, dict_info, custom_headers={\"Extra\": \"extra\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource,", "dict_to_update = {\"name\": \"test\"} expected = {\"name\": \"test\", \"uri\": uri,", "= {\"uri\": \"/rest/testuri\"} self.resource_client._merge_default_values() self.task = {\"task\": \"task\", \"taskState\": \"Finished\"}", "= [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls) @mock.patch.object(connection, 'get') def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self,", "mock_get_completed_task, mock_post): 
task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value =", "test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization( fields=\"AmbientTemperature,AveragePower,PeakPower\", filter=[\"startDate=2016-05-30T03:29:42.361Z\", \"endDate=2016-05-31T03:29:42.361Z\"], refresh=True, view=\"day\") expected_uri", "'3'}]}, {'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},", "= { 'nextPageUri': uri, 'members': members, 'uri': uri } result", "self.task delete_task = self.resource_client.delete('1', force=True, timeout=-1) self.assertEqual(self.task, delete_task) mock_delete.assert_called_once_with(self.URI +", "custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_default_api_version_300(self, mock_put): dict_to_update = {\"name\": \"test\"}", "\"value\"}, {\"key\": \"value\"}]} self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(connection, 'get') def", "self.assertEqual(self.task, update_task) mock_update.assert_called_once_with(\"a_uri\", dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def", "'name:ascending' query = \"name NE 'WrongName'\" view = '\"{view-name}\"' scope_uris", "300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task", "test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client.build_subresource_uri(None, \"123456\", 'sub-path') except 
exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED,", "'get') def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_custom_headers(self, mock_put, mock_ensure_resource):", "\"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)", "\"a name\", } created_resource = { \"resource_id\": \"123\", \"resource_name\": \"a", "def test_update_with_uri_called_once(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\"", "{} self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI + \"/test\") @mock.patch.object(connection, \"get\") def test_get_collection_with_multiple_filters(self, mock_get):", "'/rest/testuri/09USE7335NW3' expected_output = '/rest/testuri/09USE7335NW3' result = self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def", "test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_open.return_value", "self.assertEqual(result, expected) @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = '/rest/testuri/'", "'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri", "None, self.response_body expected = 
{\"name\": \"test\", \"type\": \"anotherType\", \"uri\": uri}", "self.response_body result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'post')", "\"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = False mock_open.return_value = io.StringIO()", "task_with_output['taskOutput'] = [] mock_post.return_value = None, {} try: self.resource_client.create_report(\"/rest/path/create-report\") except", "mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value", "'put') def test_update_with_custom_headers(self, mock_put): dict_to_update = {\"name\": \"test\"} mock_put.return_value =", "class ResourceUtilizationMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client =", "test_helper_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body filter = \"name='Exchange Server'\"", "self.assertEqual(len(collection), 2) @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\",", "resource_client self.resource_client.URI = self.URI self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES self.resource_client.data = {\"uri\":", "resource=\"/rest/testuri/11\", subresource=\"12\", path=\"/sub/\", uri=\"/rest/testuri/11/sub/12\"), dict( resource=\"/rest/testuri/13\", subresource=None, path=\"/sub/\", uri=\"/rest/testuri/13/sub\"), ]", "mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers) mock_update.assert_called_once_with(mock.ANY,", 
"@mock.patch.object(connection, 'delete') def test_helper_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None, self.response_body filter", "self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_uri_called_once(self,", "\\ '&query=name%20NE%20%27WrongName%27' \\ '&sort=name%3Aascending' \\ '&view=%22%7Bview-name%7D%22' \\ '&fields=name%2Cowner%2Cmodified' \\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI)", "= ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1], 'members': [{'id':", "uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock()", "custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_and_custom_headers(self, mock_wait4task,", "None} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(ResourceHelper, \"do_get\") def test_refresh(self,", "TYPE_V300} } def setUp(self): super(ResourceClientTest, self).setUp() self.host = '127.0.0.1' self.connection", "\"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\",", "\"/rest/testuri\" class BaseTest(unittest.TestCase): URI = \"/rest/testuri\" TYPE_V200 = \"typeV200\" TYPE_V300", "but for pagination purposes, a nextPageUri is returned by OneView.", "file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = False 
mock_open.return_value", "ResourceSchemaMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceSchema(self.connection)", "query, sort) uri = \"{resource_uri}?start=1\" \\ \"&count=500\" \\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\", "raised\") def test_merge_resources(self): resource1 = {\"name\": \"resource1\", \"type\": \"resource\"} resource2", "[{'id': '4'}, {'id': '5'}, {'id': '6'}]}, {'nextPageUri': None, 'members': [{'id':", "test_update_with_force(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value", "mock_post): dict_to_create = { \"resource_name\": \"a name\", } created_resource =", "\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource): request_body = [{", "'1'}, {'id': '2'}, {'id': '3'}] mock_get.return_value = { 'nextPageUri': uri,", "{} headers = {'Content-Type': 'application/json', 'Extra': 'extra'} self.connection._apiVersion = 300", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY,", "def test_create_should_return_new_resource_instance(self, mock_post): mock_post.return_value = {}, {} new_instance = self.resource_client.create({})", "mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1)", "else: self.fail(\"Expected Exception was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client.build_uri(", "result = self.resource_client.update_with_zero_body( 
\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, self.response_body) class ResourcePatchMixinTest(BaseTest): def", "subresource=None, path=\"/sub/\", uri=\"/rest/testuri/13/sub\"), ] for option in options: uri =", "@mock.patch.object(connection, 'get') def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1']", "{\"resource_name\": \"a name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI,", "test_build_uri_with_id_should_work(self): input = '09USE7335NW35' expected_output = '/rest/testuri/09USE7335NW35' result = self.resource_client.build_uri(input)", "self.resource_client.download(uri, file_path) self.assertFalse(result) def test_transform_list_to_dict(self): list = ['one', 'two', {'tree':", "mock_put.return_value = None, self.response_body resource_client = ResourceClient(self.connection, self.URI) resource_client.update(dict_to_update, uri=uri)", "self.resource_client.get_by('name', 'MyFibreNetwork') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_autofix(self, mock_get_all): mock_get_all.return_value", "mock_get.side_effect = results result = self.resource_client.get_all(count=15) expected_items = [{\"id\": \"1\"},", "\"wait_for_task\") def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"}", "expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300, \"uri\": uri} self.resource_client._merge_default_values() self.resource_client.update(dict_to_update)", "expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, 
\"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_return_entity(self,", "'/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id':", "mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"] results = [{\"nextPageUri\": uri_list[1],", "mock_post.assert_called_once_with(self.URI, expected, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_without_default_values(self, mock_post): dict_to_create =", "{'id': '7'}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, 'get') def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get): uri_list", "resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_custom_headers(self, mock_post):", "custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'}) def test_delete_dict_invalid_uri(self): dict_to_delete = {\"task\": \"task\",", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_wait_for_activity_on_create(self, mock_wait4task, mock_post): mock_post.return_value = self.task, {}", "\"resource1\", \"type\": \"resource\"} resource2 = {\"name\": \"resource2\", \"port\": \"1\"} expected_resource", "self.resource_helper.get_collection() self.assertEqual(len(collection), 2) def test_build_uri_with_id_should_work(self): input = \"09USE7335NW35\" expected_output =", "Exception was not raised\") @mock.patch.object(connection, 'post') def test_create_when_the_resource_is_a_list(self, mock_post): dict_to_create", "self.resource_client.get_collection('12345') mock_get.assert_called_once_with(self.URI + \"/12345\") @mock.patch.object(connection, 'get') def 
test_get_collection_with_filter(self, mock_get): mock_get.return_value", "mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_api_version_200(self, mock_post): dict_to_create =", "resource_client=None): self.resource_client = resource_client self.resource_client.URI = self.URI self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES", "test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\": \"value\"}]} self.resource_helper.get_collection()", "'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy()", "def test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data = [] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper, \"do_get\") def", "coding: utf-8 -*- ### # (C) Copyright [2019] Hewlett Packard", "result = self.resource_client.update_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'put') def", "self.task mock_wait4task.return_value = entity self.resource_client.patch(\"replace\", \"/name\", \"new_name\") mock_wait4task.assert_called_once_with(self.task, mock.ANY) class", "'7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls = [call(uri_list[0]),", "self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( '/rest/testuri', {}, custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task')", "mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers) @mock.patch.object(connection, 'patch') 
@mock.patch.object(TaskMonitor, 'get_completed_task') def test_patch_request_custom_headers(self, mock_task,", "self.resource_client._helper.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception", "FakeResource(None) try: fake_resource.get_fake(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message, exception.args[0]) else:", "expected = {'name': 'resource1'} resource_client = ResourceClient(self.connection, self.URI) result =", "resource schema methods\"\"\" class StubResource(Resource): \"\"\"Stub class to test resource", "= self.task, mock.Mock() mock_wait4task.return_value = fake_associated_resurce result = self.resource_client.upload(filepath, uri)", "custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path =", "results to be returned but for pagination purposes, a nextPageUri", "= self.task self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(force=True) mock_delete.assert_called_once_with(\"/rest/testuri?force=True\", custom_headers=None) @mock.patch.object(Resource,", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\"", "mock_get.return_value = {} self.resource_client.get_collection('12345', 'name=name') mock_get.assert_called_once_with(self.URI + \"/12345?filter=name%3Dname\") @mock.patch.object(connection, 'get')", "{ 'nextPageUri': uri, 'members': members, 'uri': uri } result =", "{\"nextPageUri\": uri_list[2], \"members\": [{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {\"nextPageUri\":", "'3518be0e-17c1-4189-8f81-83f3724f6155' 
extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155') def test_extract_id_from_uri_unsupported(self): # This", "\"resource1\", \"type\": \"type1\"}, {\"name\": \"resource2\", \"type\": \"type1\"} ] self.assertEqual(result_list, expected_list)", "\"post\") def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, mock.Mock() self.resource_client.upload(filepath)", "\"body\"} self.custom_headers = {\"Accept-Language\": \"en_US\"} class ResourceFileHandlerMixinTest(BaseTest): def setUp(self): self.connection", "\"type\": \"anotherType\"} mock_post.return_value = {}, {} expected = {\"resource_name\": \"a", "resource2 = {\"name\": \"resource2\"} result_list = merge_default_values([resource1, resource2], default_type) expected_list", "self.fail(\"Expected Exception was not raised\") def test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client.build_uri( '/rest/test/another/resource/uri/09USE7335NW3')", "'two', {'tree': 3}, 'four', 5] dict_transformed = transform_list_to_dict(list=list) self.assertEqual(dict_transformed, {'5':", "mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body() mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\")", "\"test\", \"uri\": uri, \"type\": \"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update)", "{} mock_get_completed_task.return_value = task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, task_output) @mock.patch.object(connection,", "{ \"resource_name\": \"a name\", } created_resource 
= { \"resource_id\": \"123\",", "self.resource_client.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_should_wait_for_task(self,", "custom_headers=None) @mock.patch.object(connection, 'put') def test_update_without_default_values(self, mock_put): dict_to_update = {\"name\": \"test\"}", "= self.resource_client._helper.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input = \"/rest/testuri/09USE7335NW3\" expected_output", "\"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = \"/rest/testuri/\"", "URI = \"/rest/testuri\" TYPE_V200 = 'typeV200' TYPE_V300 = 'typeV300' DEFAULT_VALUES", "\"name NE 'WrongName'\" view = '\"{view-name}\"' scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a' mock_get.return_value", "uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_with_default_api_version_300(self, mock_put):", "test resoruce zero body methods\"\"\" class StubResourcePatch(ResourcePatchMixin, Resource): \"\"\"Stub class", "= {}, {} self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\":", "[{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]}, {'nextPageUri':", "mock_put.return_value = self.task, self.task mock_wait4task.return_value = response_body result = self.resource_client.update_with_zero_body(", "mock_update.assert_called_once_with(\"a_uri\", dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') 
@mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_return_entity(self, mock_wait4task,", "}] mock_patch.return_value = {}, {} self.resource_client.patch( '/rest/testuri/123a53cz', 'replace', '/name', 'new_name',", "mock_ensure_resource): dict_to_update = {\"name\": \"test\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update,", "\"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource): mock_patch.return_value", "{\"name\": \"testname\"} mock_get_by.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper, \"do_get\")", "{\"id\": \"3\"}]}, {\"nextPageUri\": uri_list[2], \"members\": [{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\":", "} def setUp(self): super(ResourceClientTest, self).setUp() self.host = '127.0.0.1' self.connection =", "[]) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {'nextPageUri': None,", "\"sub-path\") except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception", "self.response_body self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None) @mock.patch.object(connection, 'put') def", "self.response_body) @mock.patch.object(connection, 'post') def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\": \"a", "mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get')", 
"Version 2.0 (the \"License\"); # you may not use this", "self.assertEqual(self.resource_client.data, updated_data) @mock.patch.object(connection, \"post\") def test_create_uri(self, mock_post): dict_to_create = {\"resource_name\":", "mock_get.return_value = { 'nextPageUri': uri, 'members': members, 'uri': uri }", "fake_response_body = mock.Mock() uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "mock_get_all): mock_get_all.return_value = [{\"name\": \"expected\"}, {\"name\": \"not expected\"}] response =", "'200': {'type': TYPE_V200}, '300': {'type': TYPE_V300} } def setUp(self): super(ResourceClientTest,", "@mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_return_entity(self, mock_wait4task, mock_put): dict_to_update =", "test_get_utilization_with_args(self, mock_get): self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z', refresh=True, view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization'", "In this case, the user provides a maximum number of", "def test_build_uri_with_uri_should_work(self): input = '/rest/testuri/09USE7335NW3' expected_output = '/rest/testuri/09USE7335NW3' result =", "self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection,", "'put') def test_update_with_force(self, mock_put): dict_to_update = {\"name\": \"test\"} uri =", "{} self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI + \"?filter=name%3Dname\") @mock.patch.object(connection, \"get\") def test_get_collection_with_path(self, mock_get):", "'/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 
'application/json-patch+json'}) @mock.patch.object(connection, 'patch')", "as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_update_with_none(self): try:", "{\"id\": \"6\"}, {\"id\": \"7\"}] self.assertSequenceEqual(result, expected_items) @mock.patch.object(connection, \"get\") def test_get_all_should_stop_requests_when_requested_count_reached(self,", "dict_to_update self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(self.resource_client.data, dict_to_update) @mock.patch.object(Resource, \"get_by\") def test_get_by_name_with_result(self, mock_get_by):", "\"type\": \"type1\"} ] self.assertEqual(result_list, expected_list) def test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method) class", "@mock.patch.object(connection, 'get') def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1']", "mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value =", "members) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {'nextPageUri':", "def test_delete_with_none(self): try: self.resource_client.delete(None) except ValueError as e: self.assertTrue(\"Resource\" in", "test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for this resource\" uri =", "\"get_by\") def test_ensure_resource_without_data_update(self, mock_get_by): mock_get_by.return_value = [] actual_result = self.resource_client.ensure_resource_data(update_data=False)", "@mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def 
test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart): uri", "\"type\": \"anotherType\"} uri = \"/rest/testuri\" mock_put.return_value = None, self.response_body expected", "mock_wait4task, mock_put, mock_ensure_resource): response_body = {\"resource_name\": \"name\"} self.resource_client.URI = \"/rest/enclosures\"", "\"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\")", "self.resource_client.get(uri) mock_get.assert_called_once_with(uri) def test_get_with_uri_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for this", "test_build_uri_with_different_resource_uri_should_raise_exception(self): try: self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0])", "'&fields=name%2Cowner%2Cmodified' \\ '&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI) self.assertEqual([{'member': 'member'}], result) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def", "try: self.resource_client.build_subresource_uri(None, \"123456\", 'sub-path') except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0])", "= self.resource_client.download(uri, file_path) self.assertFalse(result) class ResourceZeroBodyMixinTest(BaseTest): def setUp(self): self.connection =", "= {}, {} headers = {\"Content-Type\": \"application/json\", \"Extra\": \"extra\"} self.connection._apiVersion", "test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value = {},", "\\ '&refresh=true' \\ '&view=day' 
mock_get.assert_called_once_with(expected_uri) @mock.patch.object(connection, 'get') def test_get_utilization_by_id_with_defaults(self, mock_get):", "force=True, timeout=-1) mock_delete.assert_called_once_with(uri) @mock.patch.object(connection, 'delete') def test_delete_all_should_return_true(self, mock_delete): mock_delete.return_value =", "default_values) self.assertEqual(result, expected) @mock.patch.object(connection, 'post_multipart_with_response_handling') def test_upload_should_call_post_multipart(self, mock_post_multipart): uri =", "[] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper, \"do_get\") def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get): self.resource_client.data =", "result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(ResourceHelper, \"do_get\") def test_refresh(self, mock_do_get):", "mock_put.return_value = None, self.response_body self.resource_client.URI = \"/rest/enclosures\" result = self.resource_client.update_with_zero_body(", "= \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = False mock_open.return_value = io.StringIO() result =", "\"get\") def test_get_all_with_custom_uri(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources\") uri = \"/rest/testuri/12467836/subresources?start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection,", "test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value = None, self.response_body self.resource_client.URI = \"/rest/enclosures\" result", "\"test\"} uri = \"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri,", "delete_result = self.resource_client.delete(resource) self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\", 
custom_headers=None) def test_delete_with_empty_dict(self): try: self.resource_client.delete({})", "@mock.patch.object(connection, \"get\") def test_get_collection_with_multiple_filters(self, mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=[\"name1=one\", \"name2=two\",", "dict_to_update result = self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(result, dict_to_update) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor,", "def test_get_by_name_with_result(self, mock_get_by): self.resource_client.get_by_name(\"Resource Name,\") mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(Resource, \"get_by\")", "timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'post') def test_create_uri(self, mock_post): dict_to_create =", "self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client.build_uri(None) except ValueError as", "mock_post): task_with_output = self.task.copy() task_with_output['taskOutput'] = [] mock_post.return_value = None,", "by applicable law or agreed to in writing, software #", "\"patch\") def test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource): request_body = [{ \"op\": \"replace\",", "= self.resource_client.get_all(count=15) expected_items = [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"},", "try: self.resource_client._helper.build_subresource_uri(None, \"123456\", \"sub-path\") except exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0])", "{\"resource_name\": \"a name\"} mock_patch.return_value = {}, {} headers = {\"Content-Type\":", "None, self.response_body filter = \"name='Exchange Server'\" result = self.resource_helper.delete_all(filter=filter, force=True,", "dict_to_create = { \"resource_name\": \"a name\", } 
created_resource = {", "name\"} mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language':", "test_build_uri_with_none_should_raise_exception(self): try: self.resource_client.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else:", "self.assertTrue(result) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path =", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath", "= fake_associated_resurce result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_associated_resurce) @mock.patch.object(connection, 'post_multipart_with_response_handling')", "expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\", "@mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() task_with_output['taskOutput']", "def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\"", "timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'put') def test_update_with_uri_called_once(self, mock_put): dict_to_update =", "mock_get_completed_task.return_value = task_with_output self.resource_client.create_report(\"/rest/path/create-report\") 
mock_post.assert_called_once_with(\"/rest/path/create-report\", {}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task')", "'get_all') def test_get_by_property_with_uri(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient,", "def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value =", "mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_and_custom_headers(self,", "= self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_given_uri(self,", "\"SPPgen9snap6.2015_0405.81.iso\") @mock.patch.object(connection, \"post_multipart_with_response_handling\") def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]}, {'nextPageUri': None, 'members':", "@mock.patch.object(connection, 'get') def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"},", "test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource): request_body = [{ \"op\": \"replace\", \"path\": \"/name\",", "= self.URI self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES self.resource_client.data = {\"uri\": \"/rest/testuri\"} 
self.resource_client._merge_default_values()", "self.response_body self.resource_client.URI = \"/rest/enclosures\" result = self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result,", "for this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" fake_resource = FakeResource(None) try:", "'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri", "was not raised\") def test_get_with_uri_from_another_resource_with_incompatible_url_shoud_fail(self): message = \"Unrecognized URI for", "timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) def test_merge_api_default_values(self): resource = {'name': 'resource1'}", "from hpOneView.connection import connection from hpOneView import exceptions from hpOneView.resources.resource", "mock_get.assert_called_once_with(expected_uri) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"get\") def test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource): self.resource_client.get_utilization(", "mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(connection, \"post\") def test_create_should_return_new_resource_instance(self, mock_post): mock_post.return_value", "uri, \"type\": \"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) expected_uri =", "mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\"))", "= self.task, {} mock_wait4task.return_value = self.task self.resource_client.create({\"test\": \"test\"}, 
timeout=60) mock_wait4task.assert_called_once_with(self.task,", "= [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\":", "self.resource_client.get_collection('12345') self.assertEqual(len(collection), 2) @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property(self, mock_get_all): self.resource_client.get_by('name', 'MyFibreNetwork')", "custom_headers=None) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post): mock_post.return_value", "self.resource_client.get_by_name('Resource Name,') self.assertIsNone(response) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,') @mock.patch.object(connection, 'get') def test_get_collection_uri(self,", "\"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"get_completed_task\") def test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource):", "def test_build_subresource_uri(self): options = [ dict( resource='1', subresource='2', path='sub', uri='/rest/testuri/1/sub/2'),", "self.assertEqual(result, []) @mock.patch.object(connection, 'post') def test_create_report_should_raise_exception_when_not_task(self, mock_post): task_with_output = self.task.copy()", "'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_return_entity(self, mock_wait4task, mock_post): dict_to_create = {", "{\"resource_name\": \"a name\", \"uri\": uri} mock_put.return_value = self.task, {} mock_wait4task.return_value", "mock_update.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.URI = \"/rest/enclosures\"", "mock_wait4task, mock_patch, mock_ensure_resource): entity = {\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task,", "\"value\"}] response = 
self.resource_client.get_by_name('Resource Name,') self.assertEqual(response, {\"name\": \"value\"}) mock_get_by.assert_called_once_with(\"name\", 'Resource", "self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( \"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "= self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.delete('1', custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language':", "= entity self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1) mock_wait4task.assert_called_once_with(self.task, mock.ANY)", "ResourceUtilizationMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceUtilization(self.connection)", "mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "try: self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else:", "test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {'nextPageUri': None, 'members': []} result =", "number of results to be returned but for pagination purposes,", "self.resource_client.get_utilization('/rest/testuri/09USE7335NW3') expected_uri = '/rest/testuri/09USE7335NW3/utilization' mock_get.assert_called_once_with(expected_uri) def test_get_utilization_with_empty(self): try: self.resource_client.get_utilization('') except", "def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post): task_with_output = self.task.copy() 
task_with_output['taskOutput'] = []", "test_create_with_empty_dict(self): try: self.resource_client.create({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0])", "StubResource(Resource): \"\"\"Stub class to test resource common methods\"\"\" URI =", "mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with(", "mock_get): try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0]) else:", "test_delete_with_dict_uri(self, mock_delete): resource = {\"uri\": \"uri\"} mock_delete.return_value = {}, {}", "class to test resource file operations\"\"\" class StubResourceZeroBody(ResourceZeroBodyMixin, Resource): \"\"\"Stub", "{}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"operation\", \"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY,", "None, fake_response_body result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, 'download_to_stream')", "= '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155') def test_extract_id_from_uri_unsupported(self): #", "= {\"resource_name\": \"a name\", \"type\": \"anotherType\"} self.resource_client.create(dict_to_create) mock_post.assert_called_once_with(self.URI, expected, custom_headers=None)", "expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \\ '&refresh=true' \\", "try: self.resource_client._helper.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: 
self.fail(\"Expected", "dict( resource=\"5\", subresource=\"/rest/testuri/5/sub/6\", path=\"sub\", uri=\"/rest/testuri/5/sub/6\"), dict( resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\", path=\"sub\", uri=\"/rest/testuri/7/sub/8\"),", "self.resource_client.upload(filepath, uri) mock_wait4task.not_been_called() @mock.patch.object(connection, \"post_multipart_with_response_handling\") @mock.patch.object(TaskMonitor, \"wait_for_task\") @mock.patch.object(connection, \"get\") def", "Exception was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client.build_uri('') except ValueError", "= { '200': {'type': TYPE_V200}, '300': {'type': TYPE_V300} } def", "mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_custom_headers(self, mock_post): dict_to_create =", "'members': [{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls", "self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.update(dict_to_update, False) self.assertEqual(self.task, self.resource_client.data) mock_update.assert_called_once_with(uri,", "\"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri) @mock.patch.object(connection, \"download_to_stream\")", "\"typeV300\"} mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource,", "test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this case, the user provides a", "self.resource_helper = ResourceHelper(self.URI, self.connection, None) @mock.patch.object(ResourceHelper, \"do_put\") @mock.patch.object(Resource, 
\"ensure_resource_data\") def", "\"/schema\") @mock.patch.object(connection, 'get') def test_get_by_id_uri(self, mock_get): self.resource_client.get('12345') mock_get.assert_called_once_with(self.URI + \"/12345\")", "resource = {'name': 'resource1'} default_values = {} expected = {'name':", "\"name NE 'WrongName'\" mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result =", "try: self.resource_client.delete(dict_to_delete, False, -1) except exceptions.HPOneViewUnknownType as e: self.assertEqual(\"Unknown object", "None, self.response_body expected_dict = {\"name\": \"test\", \"type\": self.TYPE_V300} self.resource_client.update(dict_to_update, uri=uri,", "[{\"name\": \"expected\"}, {\"name\": \"not expected\"}] response = self.resource_client.get_by('connection.name', 'expected') self.assertEqual(response,", "@mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_with_custom_headers(self, mock_wait4task, mock_delete): mock_delete.return_value =", "+ \"/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, 'get') def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\":", "\"members\": members, \"uri\": uri } result = self.resource_client.get_all() self.assertSequenceEqual(result, members)", "[{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}]}, {\"nextPageUri\":", "\"/field\", \"value\", custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection,", "\"1\"}, {\"id\": \"2\"}, {\"id\": \"3\"}, {\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\":", "\"en_US\"} class ResourceFileHandlerMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client", "TYPE_V200 = \"typeV200\" 
TYPE_V300 = \"typeV300\" DEFAULT_VALUES = { \"200\":", "def __init__(self, con): self._connection = con self._client = ResourceClient(con, \"/rest/fake/resource\")", "{'type': TYPE_V200}, '300': {'type': TYPE_V300} } def setUp(self): super(ResourceClientTest, self).setUp()", "expected_result) @mock.patch.object(connection, \"get\") def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK \\\"Test", "sort) uri = \"{resource_uri}?start=1\" \\ \"&count=500\" \\ \"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27\" \\ \"&query=name%20NE%20%27WrongName%27\"", "@mock.patch.object(connection, \"get\") def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"]", "self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_should_not_override_resource_properties(self,", "applicable law or agreed to in writing, software # distributed", "70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_request_when_uri_is_provided(self,", "{\"resource_id\": \"123a53cz\"} mock_patch.return_value = self.task, self.task mock_wait4task.return_value = entity result", "200 expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1,", "self.task, {} mock_get_completed_task.return_value = task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, task_output)", "Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, 
transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values,", "exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected Exception was not", "\"delete\") def test_delete_should_return_true(self, mock_delete, mock_ensure_resource): mock_delete.return_value = None, self.response_body self.resource_client.data", "timeout=-1) mock_delete.assert_called_once_with(uri) @mock.patch.object(connection, 'delete') def test_delete_all_should_return_true(self, mock_delete): mock_delete.return_value = None,", "'en_US'}) @mock.patch.object(connection, 'patch') def test_patch_with_custom_headers_v300(self, mock_patch): mock_patch.return_value = {}, {}", "self.assertEqual(result, []) @mock.patch.object(ResourceHelper, \"do_get\") def test_refresh(self, mock_do_get): updated_data = {\"resource_name\":", "= results self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])] self.assertEqual(mock_get.call_args_list, expected_calls)", "ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_update_with_none(self):", "def test_get_all_with_defaults(self, mock_get): self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get')", "get_by_return_value = [{\"name\": \"testname\", \"uri\": \"/rest/testuri\"}] self.resource_client.data = {\"name\": \"testname\"}", "\"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, \"put\") def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value", "timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'post') def test_create_with_zero_body_without_task(self, mock_post): mock_post.return_value =", "\"/rest/testuri\" 
mock_put.return_value = None, self.response_body expected_dict = {\"name\": \"test\", \"type\":", "### # (C) Copyright [2019] Hewlett Packard Enterprise Development LP", "+ \"?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree\") @mock.patch.object(connection, \"get\") def test_get_collection_should_return_list(self, mock_get): mock_get.return_value = {\"members\":", "@mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update): mock_update.return_value =", "self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub') mock_get_all.assert_called_once_with(filter=\"\\\"name='MyFibreNetwork'\\\"\", uri='/rest/testuri/5435534/sub') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property_with__invalid_uri(self, mock_get_all):", "\"8\"}]}] mock_get.side_effect = results self.resource_client.get_all() expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]", "def test_update_with_api_version_200(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\"", "self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(self.resource_client.data, dict_to_update) @mock.patch.object(Resource, \"get_by\") def test_get_by_name_with_result(self, mock_get_by): self.resource_client.get_by_name(\"Resource", "fake_response_body = mock.Mock() uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post): response_body = {\"resource_name\":", "self.fail(\"Expected Exception was not raised\") def test_build_uri_with_empty_str_should_raise_exception(self): try: self.resource_client._helper.build_uri('') except", "result = self.resource_client.create_with_zero_body( 
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, self.response_body) @mock.patch.object(connection, 'post') def", "'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection, 'get') def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart):", "= {\"Extra\": \"extra\"} self.connection._apiVersion = 300 self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers) mock_patch.assert_called_once_with(", "\"extra\", \"Content-Type\": \"application/json-patch+json\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "\"taskState\": \"Finished\"} self.response_body = {\"body\": \"body\"} self.custom_headers = {\"Accept-Language\": \"en_US\"}", "None, \"members\": None} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(ResourceHelper, \"do_get\")", "= self.resource_client.create({}) self.assertNotEqual(self.resource_client, new_instance) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_wait_for_activity_on_create(self,", "file_path) self.assertFalse(result) class ResourceZeroBodyMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300)", "self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(connection, 'put') def test_update_should_not_override_resource_properties(self,", "self.resource_client.update({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail()", "'post_multipart_with_response_handling') def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart): filepath = 
\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None,", "e: self.assertEqual(\"Unknown object type\", e.args[0]) else: self.fail() @mock.patch.object(connection, 'get') def", "\"a name\"} mock_post.return_value = {}, {} resource_client = ResourceClient(self.connection, self.URI)", "self.resource_client._merge_default_values() self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def", "try: self.resource_client.update({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else:", "\"/name\", \"value\": \"new_name\", }] mock_patch.return_value = {}, {} self.resource_client.patch(\"replace\", \"/name\",", "was not raised\") @mock.patch.object(connection, 'get') def test_get_utilization_with_args(self, mock_get): self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower',", "response = self.resource_client.get_by_name('Resource Name,') self.assertEqual(response, {\"name\": \"value\"}) mock_get_by.assert_called_once_with(\"name\", 'Resource Name,')", "def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get): \"\"\" In this case, the user provides", "mock_do_get): self.resource_client.data = {\"uri\": \"/uri/test\"} mock_do_get.return_value = [] with self.assertRaises(exceptions.HPOneViewResourceNotFound):", "'wait_for_task') def test_create_return_entity(self, mock_wait4task, mock_post): dict_to_create = { \"resource_name\": \"a", "'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri", "= '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' mock_download_to_stream.return_value = True mock_open.return_value = io.StringIO() result =", "dict_to_update = {\"name\": \"test\"} uri 
= \"/rest/testuri\" mock_put.return_value = None,", "{} mock_wait4task.return_value = dict_to_update result = self.resource_client.update(dict_to_update, timeout=-1) self.assertEqual(result, dict_to_update)", "] self.assertEqual(result_list, expected_list) def test_raise_unavailable_method_exception(self): self.assertRaises(exceptions.HPOneViewUnavailableMethod, unavailable_method) class FakeResource(object): def", "fake_resource = FakeResource(None) try: fake_resource.get_fake(uri) except exceptions.HPOneViewUnknownType as exception: self.assertEqual(message,", "= {}, {} self.connection._apiVersion = 200 self.resource_client._merge_default_values() expected_dict = {\"resource_name\":", "= \"name NE 'WrongName'\" mock_get.return_value = {\"members\": [{\"member\": \"member\"}]} result", "uri_list = ['/rest/testuri?start=0&count=15', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1], 'members':", "self.assertTrue(result) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream): file_path =", "self.assertEqual(merged_resource, expected_resource) def test_merge_default_values(self): default_type = {\"type\": \"type1\"} resource1 =", "test_create_when_the_resource_is_a_list(self, mock_post): dict_to_create = [{\"resource_name\": \"a name\"}] mock_post.return_value = {},", "\"/rest/testuri\", {}, custom_headers=None) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_and_custom_headers(self, mock_wait4task,", "{}, {} headers = {'Extra': 'extra'} self.connection._apiVersion = 300 resource_client", "mock.Mock() self.resource_client.upload(filepath) mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY) @mock.patch.object(connection, 'post_multipart_with_response_handling') 
@mock.patch.object(TaskMonitor, 'wait_for_task') @mock.patch.object(connection,", "# You may obtain a copy of the License at", "self.response_body mock_wait4task.return_value = self.task filter = \"name='Exchange Server'\" uri =", "self.resource_client.data) mock_update.assert_called_once_with(uri, expected, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\")", "task_with_output result = self.resource_client.create_report(\"/rest/path/create-report\") self.assertEqual(result, task_output) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task')", "= [] mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output result", "= self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(connection, 'put')", "mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, \"post_multipart_with_response_handling\")", "connection('127.0.0.1', 300) self.resource_client = StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor,", "\"get\") def test_get_all_should_return_all_items_when_response_paginated(self, mock_get): uri_list = [\"/rest/testuri?start=0&count=-1\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=1\"] results", "[{'id': '7'}, {'id': '8'}]}] mock_get.side_effect = results self.resource_client.get_all(count=3) mock_get.assert_called_once_with(uri_list[0]) @mock.patch.object(connection,", "new_resource = 
self.resource_client.create_with_zero_body(timeout=-1) self.assertNotEqual(new_resource, self.resource_client) @mock.patch.object(connection, \"post\") def test_create_with_zero_body_without_task(self, mock_post):", "this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" fake_resource = FakeResource(None) try: fake_resource.get_fake(uri)", "name\", \"uri\": uri} mock_put.return_value = self.task, {} mock_wait4task.return_value = dict_to_update", "self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_api_version_200(self,", "mock_get): mock_get.return_value = {} self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI + \"?filter=name%3Dname\") @mock.patch.object(connection, \"get\")", "= '127.0.0.1' self.connection = connection(self.host, 300) self.resource_client = ResourceClient(self.connection, self.URI)", "[]) @mock.patch.object(connection, 'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_all_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value", "= {}, {} self.connection._apiVersion = 200 expected_dict = {\"resource_name\": \"a", "def test_patch_with_custom_headers_v200(self, mock_patch): mock_patch.return_value = {}, {} self.connection._apiVersion = 200", "resource_client = ResourceClient(self.connection, self.URI) resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value', custom_headers=self.custom_headers) mock_patch.assert_called_once_with(mock.ANY,", "'/rest/testuri/id', dict_info, custom_headers={'Extra': 'extra', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 'patch') @mock.patch.object(TaskMonitor, 'wait_for_task')", "'four': True, 'one': True, 'tree': 3, 'two': True}) def test_extract_id_from_uri(self):", "custom_headers=None) 
@mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource): dict_to_update", "mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body(timeout=-1) mock_post.assert_called_once_with( \"/rest/testuri\",", "test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_helper.get_all(uri=\"/rest/testuri/12467836/subresources?param=value\") uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri) @mock.patch.object(connection, \"get\") def", "\"post\") def test_create_without_default_values(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"} mock_post.return_value", "mock_delete.assert_called_once_with(\"uri\", custom_headers=None) def test_delete_with_empty_dict(self): try: self.resource_client.delete({}) except ValueError as e:", "entity self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1) mock_wait4task.assert_called_once_with(self.task, mock.ANY) def", "{\"resource_name\": \"a name\", \"type\": self.TYPE_V300} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)", "mock_get_by.assert_called_once_with(\"name\", \"Resource Name,\") @mock.patch.object(Resource, \"get_by\") def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value =", "mock_get): uri = '/rest/testuri?start=0&count=-1' members = [{'id': '1'}, {'id': '2'},", "try: self.resource_client.delete(None) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else:", "fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z', refresh=True, view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\", 
"Development LP # # Licensed under the Apache License, Version", "self.assertRaises(exceptions.HPOneViewResourceNotFound): self.resource_client.ensure_resource_data(update_data=True) @mock.patch.object(ResourceHelper, \"do_get\") @mock.patch.object(Resource, \"get_by\") def test_ensure_resource_should_update_resource_data(self, mock_do_get, mock_get_by):", "ResourceClient(self.connection, self.URI) resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', 70) mock_patch.assert_called_once_with( '/rest/testuri/123a53cz',", "self.resource_client.create_report(\"/rest/path/create-report\") mock_post.assert_called_once_with(\"/rest/path/create-report\", {}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'get_completed_task') def test_create_report_should_wait_task_completion(self, mock_get_completed_task,", "@mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream): file_path = \"~/archive.log\" uri =", "test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get): uri = \"/rest/testuri?start=0&count=-1\" members = [{\"id\": \"1\"}, {\"id\":", "@mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource): mock_update.return_value", "\"members\": []} result = self.resource_client.get_all() self.assertEqual(result, []) @mock.patch.object(connection, \"get\") def", "path=\"sub\", uri=\"/rest/testuri/5/sub/6\"), dict( resource=\"/rest/testuri/7\", subresource=\"/rest/testuri/7/sub/8\", path=\"sub\", uri=\"/rest/testuri/7/sub/8\"), dict( resource=None, subresource=\"/rest/testuri/9/sub/10\",", "results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\": \"1\"}, {\"id\": \"2\"}, {\"id\":", "'get') def test_get_all_called_once(self, mock_get): filter = \"'name'='OneViewSDK \\\"Test FC 
Network'\"", "@mock.patch.object(ResourceClient, 'get_by') def test_get_by_name_without_result(self, mock_get_by): mock_get_by.return_value = [] response =", "ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_get_with_none(self):", "mock_post): task_output = [ {\"type\": \"FCIssueResponseV2\", \"created\": \"2015-03-24T15: 32: 50.889Z\"},", "entity result = self.resource_client.patch( '123a53cz', 'replace', '/name', 'new_name', -1) self.assertEqual(result,", "def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI,", "timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") @mock.patch.object(TaskMonitor,", "= ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=1'] results = [{'nextPageUri': uri_list[1], 'members': [{'id':", "test_delete_with_empty_dict(self): try: self.resource_client.delete({}) except ValueError as e: self.assertTrue(\"Resource\" in e.args[0])", "except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0]) else: self.fail(\"Expected Exception was", "\"get\") def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get): mock_get.return_value = {\"nextPageUri\": None, \"members\": []}", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() mock_wait4task.return_value = fake_associated_resurce result =", "test_update_with_uri_called_once(self, mock_put): dict_to_update = {\"name\": \"test\"} uri = \"/rest/resource/test\" mock_put.return_value", "self.resource_client.get_utilization('09USE7335NW3', 
fields='AmbientTemperature,AveragePower,PeakPower', filter='startDate=2016-05-30T03:29:42.361Z', refresh=True, view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z'", "\"value\"}]} collection = self.resource_client.get_collection('12345') self.assertEqual(len(collection), 2) @mock.patch.object(ResourceClient, 'get_all') def test_get_by_property(self,", "= '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(id, extracted_id) def test_extract_id_from_uri_with_extra_slash(self): uri", "mock_ensure_resource.assert_called_once() def test_ensure_resource_raise_unique_identifier_exception(self): self.resource_client.data = [] self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers, self.resource_client.ensure_resource_data) @mock.patch.object(ResourceHelper, \"do_get\")", "test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except exceptions.HPOneViewUnknownType as e: self.assertEqual(UNRECOGNIZED_URI, e.args[0])", "filter = \"name='Exchange Server'\" result = self.resource_client.delete_all(filter=filter, force=True, timeout=-1) self.assertTrue(result)", "{ '200': {'type': TYPE_V200}, '300': {'type': TYPE_V300} } def setUp(self):", "\"value\"}, {\"key\": \"value\"}]} collection = self.resource_helper.get_collection() self.assertEqual(len(collection), 2) def test_build_uri_with_id_should_work(self):", "= {\"resource_data\": \"resource_data\", \"uri\": uri, \"type\": \"typeV300\"} mock_update.return_value = self.task,", "mock.ANY) class ResourceUtilizationMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client", "mock_post.return_value = {}, {} self.resource_client.create(dict_to_create, custom_headers=self.custom_headers) mock_post.assert_called_once_with(mock.ANY, mock.ANY, 
custom_headers={\"Accept-Language\": \"en_US\"})", "merge_default_values([resource1, resource2], default_type) expected_list = [ {\"name\": \"resource1\", \"type\": \"type1\"},", "@mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body =", "self.resource_client._helper.build_uri( \"/rest/test/another/resource/uri/09USE7335NW3\") except exceptions.HPOneViewUnknownType as exception: self.assertEqual(UNRECOGNIZED_URI, exception.args[0]) else: self.fail(\"Expected", "expected_dict, custom_headers=None) @mock.patch.object(connection, 'post') def test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\":", "None self.assertEqual(actual_result, expected_result) @mock.patch.object(connection, \"get\") def test_get_all_called_once(self, mock_get): filter =", "mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'}) @mock.patch.object(connection, 'post') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_create_with_zero_body_return_entity(self,", "= mock.Mock() uri = \"/rest/testuri/\" filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value =", "= {} self.resource_helper.get_collection(path=\"/test\") mock_get.assert_called_once_with(self.URI + \"/test\") @mock.patch.object(connection, \"get\") def test_get_collection_with_multiple_filters(self,", "self.assertEqual(result, []) @mock.patch.object(connection, \"get\") def test_get_all_should_return_empty_list_when_no_members(self, mock_get): mock_get.return_value = {\"nextPageUri\":", "\"ensure_resource_data\") @mock.patch.object(connection, \"delete\") def test_delete_should_return_true(self, mock_delete, mock_ensure_resource): mock_delete.return_value = None,", "uri = 
'/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id = '3518be0e-17c1-4189-8f81-83f3724f6155' extracted_id = extract_id_from_uri(uri) self.assertEqual(id,", "'delete') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_delete_by_id_called_once(self, mock_wait4task, mock_delete): mock_delete.return_value = self.task,", "def test_build_uri_with_none_should_raise_exception(self): try: self.resource_client.build_uri(None) except ValueError as exception: self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])", "uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all): mock_get_all.return_value = [{\"name\": \"expected\"},", "{} resource_client = ResourceClient(self.connection, self.URI) resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)", "@mock.patch.object(connection, \"delete\") def test_delete_should_return_true(self, mock_delete, mock_ensure_resource): mock_delete.return_value = None, self.response_body", "mock_laod_resource): dict_to_update = {\"name\": \"test\"} uri = \"/rest/testuri\" expected =", "from mock import call from tests.test_utils import mock_builtin from hpOneView.connection", "setUp(self): self.connection = connection('127.0.0.1', 300) self.resource_client = StubResourceFileHandler(self.connection) super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client)", "self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_call_download_to_stream_with_given_uri(self, mock_open,", "import call from tests.test_utils import mock_builtin from hpOneView.connection import connection", "name\"} mock_post.return_value = {}, {} expected_dict = {\"resource_name\": \"a name\",", "\"123456\", \"sub-path\") except 
exceptions.HPOneViewValueError as exception: self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0]) else: self.fail(\"Expected", "'application/json', 'Extra': 'extra'} self.connection._apiVersion = 300 resource_client = ResourceClient(self.connection, self.URI)", "headers = {'Extra': 'extra'} self.connection._apiVersion = 300 resource_client = ResourceClient(self.connection,", "@mock.patch.object(connection, 'post') def test_create_with_default_api_version_300(self, mock_post): dict_to_create = {\"resource_name\": \"a name\"}", "def test_delete_with_dict_uri(self, mock_delete): resource = {\"uri\": \"uri\"} mock_delete.return_value = {},", "test_build_uri_with_uri_should_work(self): input = '/rest/testuri/09USE7335NW3' expected_output = '/rest/testuri/09USE7335NW3' result = self.resource_client.build_uri(input)", "'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): fake_response_body = mock.Mock()", "\"/rest/testuri\" dict_to_update = {\"resource_data\": \"resource_data\", \"uri\": uri} expected = {\"resource_data\":", "file_path) mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_call_download_to_stream_with_open_file(self, mock_open,", "@mock.patch.object(connection, \"post\") def test_create_should_return_new_resource_instance(self, mock_post): mock_post.return_value = {}, {} new_instance", "example is not supported yet uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/otherthing' extracted_id =", "mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('1',", "self.resource_client = StubResourcePatch(self.connection) super(ResourcePatchMixinTest, 
self).setUp(self.resource_client) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"patch\") def", "ValueError as e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_get_by_with_name_none(self):", "uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results = [{'nextPageUri': uri_list[1], 'members':", "mock_get.assert_called_once_with(\"/rest/testuri/123\") @mock.patch.object(connection, \"get\") def test_get_by_id_without_result(self, mock_get): mock_get.return_value = [] response", "mock_delete): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task delete_task =", "to test resource common methods\"\"\" URI = \"/rest/testuri\" class BaseTest(unittest.TestCase):", "self.resource_client.update_with_zero_body( \"/rest/enclosures/09USE133E5H4/configuration\", timeout=-1) self.assertEqual(result, self.response_body) class ResourcePatchMixinTest(BaseTest): def setUp(self): self.connection", "= StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_create_with_zero_body_called_once(self,", "'get') def test_get_collection_uri(self, mock_get): mock_get.return_value = {\"members\": [{\"key\": \"value\"}, {\"key\":", "io import unittest import mock from mock import call from", "\"typeV300\"} mock_update.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.update(dict_to_update, False)", "mock_wait4task.return_value = self.task self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration', timeout=-1) mock_update.assert_called_once_with( \"/rest/enclosures/09USE133E5H4/configuration\", None, custom_headers=None) @mock.patch.object(connection,", "[{\"key\": \"value\"}, {\"key\": 
\"value\"}]} self.resource_helper.get_collection() mock_get.assert_called_once_with(self.URI) @mock.patch.object(connection, \"get\") def test_get_collection_with_filter(self,", "\"ensure_resource_data\") @mock.patch.object(connection, \"delete\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def test_delete_with_force(self, mock_ensure_resource, mock_delete, mock_wait4task):", "def test_upload_should_call_post_multipart(self, mock_post_multipart): uri = '/rest/testuri/' filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value", "mock_wait4task, mock_put, mock_ensure_resource): uri = \"/rest/testuri\" dict_to_update = {\"resource_name\": \"a", "\"members\": [{\"id\": \"4\"}, {\"id\": \"5\"}, {\"id\": \"6\"}]}, {'nextPageUri': None, \"members\":", "= [\"/rest/testuri?start=0&count=15\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\": uri_list[1], \"members\": [{\"id\":", "mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') except exceptions.HPOneViewUnknownType", "file_path = \"~/archive.log\" uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315' fake_file = io.StringIO() mock_open.return_value", "{\"name\": \"test\", \"type\": \"anotherType\", \"uri\": uri} self.resource_client.update(dict_to_update) mock_put.assert_called_once_with(uri, expected, custom_headers=None)", "self.connection._apiVersion = 200 self.resource_client._merge_default_values() expected_dict = {\"resource_name\": \"a name\", \"type\":", "'members': [{'id': '7'}]}] mock_get.side_effect = results result = self.resource_client.get_all() expected_items", "mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US', 'Content-Type': 'application/json-patch+json'}) @mock.patch.object(connection, 
'patch') @mock.patch.object(TaskMonitor, 'wait_for_task')", "\"License\"); # you may not use this file except in", "self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None) @mock.patch.object(connection, \"post\") def test_create_with_default_api_version_300(self,", "\"resource_data\", \"uri\": \"a_uri\"} mock_update.return_value = self.task, self.response_body mock_wait4task.return_value = self.task", "'application/json-patch+json'}) @mock.patch.object(connection, 'patch') def test_patch_request_when_uri_is_provided(self, mock_patch): request_body = [{ 'op':", "mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path", "self.resource_client.get_all() uri = \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri(self, mock_get):", "{\"resource_name\": \"a name\"} mock_patch.return_value = {}, {} headers = {\"Extra\":", "mock_ensure_resource): mock_patch.return_value = {}, {} self.connection._apiVersion = 200 self.resource_client.patch(\"operation\", \"/field\",", "limitations under the License. 
### import io import unittest import", "self.fail() def test_create_with_empty_dict(self): try: self.resource_client.create({}) except ValueError as e: self.assertTrue(\"Resource\"", "mock_delete, mock_wait4task): mock_delete.return_value = self.task, self.response_body mock_wait4task.return_value = self.task self.resource_client.data", "\"not expected\"}] response = self.resource_client.get_by('name', 'exPEcted') self.assertEqual(response, [{\"name\": \"EXpected\"}]) mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\",", "uri, custom_headers=mock.ANY) @mock.patch.object(connection, 'download_to_stream') @mock.patch(mock_builtin('open')) def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream): file_path", "\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection,", "{ '200': {\"type\": \"EnclosureGroupV200\"}, '300': {\"type\": \"EnclosureGroupV300\"} } expected =", "option['subresource'], option['path']) self.assertEqual(uri, option['uri']) def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self): try: self.resource_client.build_subresource_uri(None, \"123456\", 'sub-path')", "self.resource_client.data = {\"uri\": \"/rest/testuri\"} result = self.resource_client.delete() self.assertTrue(result) @mock.patch.object(connection, 'delete')", "@mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update): mock_update.return_value = self.task, self.task", "timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'put') def test_update_with_zero_body_without_task(self, mock_put): mock_put.return_value =", "\"replace\", \"path\": \"/name\", \"value\": \"new_name\", }] mock_patch.return_value = {}, {}", "= '/rest/testuri/' filepath = 
\"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = None, fake_response_body result", "e.args[0]) else: self.fail() def test_update_with_none(self): try: self.resource_client.update(None) except ValueError as", "class StubResourcePatch(ResourcePatchMixin, Resource): \"\"\"Stub class to test resource patch operations\"\"\"", "mock_download_to_stream): file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_download_to_stream.return_value = False", "= \"{resource_uri}?start=0&count=-1\".format(resource_uri=self.URI) mock_get.assert_called_once_with(uri) @mock.patch.object(connection, 'get') def test_get_all_with_custom_uri(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources') uri", "e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_get_with_none(self): try: self.resource_client.get(None)", "\"uri\"} mock_delete.return_value = {}, {} delete_result = self.resource_client.delete(resource) self.assertTrue(delete_result) mock_delete.assert_called_once_with(\"uri\",", "mock_wait4task, mock_post): mock_post.return_value = self.task, self.task mock_wait4task.return_value = self.task self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration',", "self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def", "mock_get.return_value = {} self.resource_helper.get_collection(filter=\"name=name\") mock_get.assert_called_once_with(self.URI + \"?filter=name%3Dname\") @mock.patch.object(connection, \"get\") def", "fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY) @mock.patch.object(connection, 
'download_to_stream')", "\"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource): uri = \"/rest/testuri\"", "'wait_for_task') def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart): uri = '/rest/testuri/' filepath =", "'get') def test_get_schema_uri(self, mock_get): self.resource_client.get_schema() mock_get.assert_called_once_with(self.URI + \"/schema\") @mock.patch.object(connection, 'get')", "expected_dict = {\"resource_name\": \"a name\", \"type\": self.TYPE_V200} self.resource_client.create(dict_to_create, timeout=-1) mock_post.assert_called_once_with(self.URI,", "'get') def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources') except exceptions.HPOneViewUnknownType as e:", "result = self.resource_client.create_with_zero_body( '/rest/enclosures/09USE133E5H4/configuration', timeout=-1) self.assertEqual(result, response_body) @mock.patch.object(connection, 'post') def", "\"typeV200\" TYPE_V300 = \"typeV300\" DEFAULT_VALUES = { \"200\": {\"type\": TYPE_V200},", "\"/rest/resource/test\" mock_put.return_value = None, self.response_body self.resource_client.update(dict_to_update, uri=uri, force=True) expected_uri =", "self.assertEqual(result, expected) def test_should_not_merge_when_default_values_not_defined(self): resource = {'name': 'resource1'} default_values =", "'patch') def test_patch_request_when_id_is_provided_v200(self, mock_patch): request_body = [{ 'op': 'replace', 'path':", "self.resource_client.data = {\"uri\": \"/rest/testuri\"} self.resource_client.delete(custom_headers=self.custom_headers) mock_delete.assert_called_once_with(mock.ANY, custom_headers={\"Accept-Language\": \"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\")", "mock_patch.return_value = {}, {} self.connection._apiVersion = 200 
self.resource_client.patch(\"operation\", \"/field\", \"value\",", "\"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" mock_open.return_value = io.StringIO() self.resource_client.download(uri, file_path) mock_download_to_stream.assert_called_once_with(mock.ANY,", "'put') def test_update_without_default_values(self, mock_put): dict_to_update = {\"name\": \"test\"} uri =", "class to test resoruce zero body methods\"\"\" class StubResourcePatch(ResourcePatchMixin, Resource):", "self.resource_client = StubResourceZeroBody(self.connection) super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client) @mock.patch.object(connection, \"post\") @mock.patch.object(TaskMonitor, \"wait_for_task\") def", "filepath = \"test/SPPgen9snap6.2015_0405.81.iso\" mock_post_multipart.return_value = self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task,", "view='day') expected_uri = '/rest/testuri/09USE7335NW3/utilization' \\ '?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \\ '&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \\ '&fields=AmbientTemperature%2CAveragePower%2CPeakPower'", "\"get_by\") def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by): self.resource_client.data = {\"name\": \"testname\"} mock_get_by.return_value =", "{\"name\": \"test\", \"type\": \"anotherType\"} uri = \"/rest/testuri\" mock_put.return_value = None,", "\"en_US\"}) @mock.patch.object(Resource, \"ensure_resource_data\") @mock.patch.object(connection, \"put\") def test_update_with_force(self, mock_put, mock_laod_resource): dict_to_update", "3, 'two': True}) def test_extract_id_from_uri(self): uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155' id =", "= {\"Accept-Language\": \"en_US\"} class ResourceFileHandlerMixinTest(BaseTest): def setUp(self): self.connection = connection('127.0.0.1',", "mock_get): mock_get.return_value = {'nextPageUri': None, 'members': None} 
result = self.resource_client.get_all()", "def test_ensure_resource_should_update_resource_data(self, mock_do_get, mock_get_by): get_by_return_value = [{\"name\": \"testname\", \"uri\": \"/rest/testuri\"}]", "resource', e.args[0]) else: self.fail() @mock.patch.object(connection, 'put') @mock.patch.object(TaskMonitor, 'wait_for_task') def test_update_with_zero_body_called_once(self,", "@mock.patch.object(connection, 'get') def test_get_all_with_custom_uri_and_query_string(self, mock_get): self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value') uri = \"/rest/testuri/12467836/subresources?param=value&start=0&count=-1\" mock_get.assert_called_once_with(uri)", "file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY) @mock.patch.object(connection, \"download_to_stream\") @mock.patch(mock_builtin(\"open\")) def test_download_should_return_true_when_success(self,", "self.resource_client.get_by('name', 'exPEcted') self.assertEqual(response, [{\"name\": \"EXpected\"}]) mock_get_all.assert_called_once_with(filter=\"\\\"name='exPEcted'\\\"\", uri='/rest/testuri') @mock.patch.object(ResourceClient, 'get_all') def", "request_body = [{ 'op': 'replace', 'path': '/name', 'value': 'new_name', }]", "\"Unrecognized URI for this resource\" uri = \"/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32\" try: self.resource_client.get(uri)", "mock_open.return_value = fake_file self.resource_client.download(uri, file_path) mock_open.assert_called_once_with(file_path, 'wb') mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY) @mock.patch.object(connection,", "file_path = \"~/archive.log\" uri = \"/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315\" fake_file = io.StringIO() mock_open.return_value", "@mock.patch.object(connection, \"get\") def test_get_all_with_different_resource_uri_should_fail(self, mock_get): try: 
self.resource_helper.get_all(uri=\"/rest/other/resource/12467836/subresources\") except exceptions.HPOneViewUnknownType as", "\"a name\", } mock_post.return_value = self.task, {} mock_wait4task.return_value = created_resource", "'get_completed_task') def test_patch_request_custom_headers(self, mock_task, mock_patch): dict_info = {\"resource_name\": \"a name\"}", "None, fake_response_body result = self.resource_client.upload(filepath, uri) self.assertEqual(result, fake_response_body) @mock.patch.object(connection, \"download_to_stream\")", "e: self.assertTrue(\"Resource\" in e.args[0]) else: self.fail() def test_get_by_with_name_none(self): try: self.resource_client.get_by(None,", "@mock.patch.object(TaskMonitor, \"wait_for_task\") def test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource): uri = \"/rest/testuri\"", "self.resource_client.build_uri(input) self.assertEqual(expected_output, result) def test_build_uri_with_uri_should_work(self): input = '/rest/testuri/09USE7335NW3' expected_output =", "self.task, mock.Mock() self.resource_client.upload(filepath, uri) mock_wait4task.assert_called_once_with(self.task, -1) @mock.patch.object(connection, 'post_multipart_with_response_handling') @mock.patch.object(TaskMonitor, 'wait_for_task')", "task_with_output['taskOutput'] = [] mock_post.return_value = self.task, {} mock_get_completed_task.return_value = task_with_output", "OneView. 
\"\"\" uri_list = [\"/rest/testuri?start=0&count=3\", \"/rest/testuri?start=3&count=3\", \"/rest/testuri?start=6&count=3\"] results = [{\"nextPageUri\":", "subresource='4', path='sub', uri='/rest/testuri/3/sub/4'), dict( resource='5', subresource='/rest/testuri/5/sub/6', path='sub', uri='/rest/testuri/5/sub/6'), dict( resource='/rest/testuri/7',", "mock_wait4task.return_value = self.task filter = \"name='Exchange Server'\" uri = \"/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True\"", "'get') def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get): uri_list = ['/rest/testuri?start=0&count=-1', '/rest/testuri?start=3&count=3', '/rest/testuri?start=6&count=3'] results" ]
[ "print_function, unicode_literals) from .core import UnitedStates class Colorado(UnitedStates): \"\"\"Colorado\"\"\" #", "__future__ import (absolute_import, division, print_function, unicode_literals) from .core import UnitedStates", "Colorado has only federal state holidays. # NOTE: <NAME> is", "UnitedStates class Colorado(UnitedStates): \"\"\"Colorado\"\"\" # Colorado has only federal state", "unicode_literals) from .core import UnitedStates class Colorado(UnitedStates): \"\"\"Colorado\"\"\" # Colorado", "has only federal state holidays. # NOTE: <NAME> is an", "(absolute_import, division, print_function, unicode_literals) from .core import UnitedStates class Colorado(UnitedStates):", "-*- coding: utf-8 -*- from __future__ import (absolute_import, division, print_function,", "utf-8 -*- from __future__ import (absolute_import, division, print_function, unicode_literals) from", "division, print_function, unicode_literals) from .core import UnitedStates class Colorado(UnitedStates): \"\"\"Colorado\"\"\"", "\"\"\"Colorado\"\"\" # Colorado has only federal state holidays. # NOTE:", "Colorado(UnitedStates): \"\"\"Colorado\"\"\" # Colorado has only federal state holidays. #", "from .core import UnitedStates class Colorado(UnitedStates): \"\"\"Colorado\"\"\" # Colorado has", "only federal state holidays. # NOTE: <NAME> is an optional", "federal state holidays. # NOTE: <NAME> is an optional holiday", "# -*- coding: utf-8 -*- from __future__ import (absolute_import, division,", "class Colorado(UnitedStates): \"\"\"Colorado\"\"\" # Colorado has only federal state holidays.", "coding: utf-8 -*- from __future__ import (absolute_import, division, print_function, unicode_literals)", "-*- from __future__ import (absolute_import, division, print_function, unicode_literals) from .core", "import (absolute_import, division, print_function, unicode_literals) from .core import UnitedStates class", "# Colorado has only federal state holidays. 
# NOTE: <NAME>", "import UnitedStates class Colorado(UnitedStates): \"\"\"Colorado\"\"\" # Colorado has only federal", ".core import UnitedStates class Colorado(UnitedStates): \"\"\"Colorado\"\"\" # Colorado has only", "from __future__ import (absolute_import, division, print_function, unicode_literals) from .core import" ]
[ "vid[..., ::-1] else: return vid return fc def black_vstripe_vid(p=0.5, size=10):", "fc def gaussian_noise(p=0.5, mean=0, sigma=0.02): def fc(img): if random.random() <", "class Augmenter: \"\"\" Generic data augmentation class with chained operations", "aug.augment(img) \"\"\" print(\"Using default image augmenter\") return Augmenter([ horizontal_flip(p), gaussian_noise(p,", "vstripe. Returns: Augmenter object. Use as: aug.augment(img) \"\"\" print(\"Using default", "::-1] else: return img return fc def vertical_flip(p=0.5): def fc(img):", "#!/usr/bin/env python # -*- coding: utf-8 -*- import random import", "-*- import random import numpy as np # Generic data", "< p: return img[..., ::-1, :] else: return img return", "aug.augment(img) \"\"\" return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean, sigma), black_hstripe_vid(p, size=strip_size),", "p: return vid[..., ::-1] else: return vid return fc def", "horizontal flip, vertical flip, gaussian noise, black hstripe, and black", "::-1] else: return vid return fc def black_vstripe_vid(p=0.5, size=10): def", "< p: j = int(random.random() * (batch.shape[-1]-size)) batch[..., j:j+size] =", "object. Use as: aug.augment(img) \"\"\" return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean,", "fc(vid): if random.random() < p: return vid[..., ::-1] else: return", "0 return batch else: return batch return fc def default_augmenter_vid(p=0.5,", "vid return fc def black_vstripe_vid(p=0.5, size=10): def fc(batch): if random.random()", "img[..., j:j+size] = 0 return img else: return img return", "fc(img): if random.random() < p: return img[..., ::-1, :] else:", "ops def add(self, op): self.ops.append(op) def augment(self, img): aug =", "Augmenter object. 
Use as: aug.augment(img) \"\"\" return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p,", "ops must be a list of functions\") quit() self.ops =", "self.ops: aug = op(aug) return aug def __call__(self, img): return", "-*- coding: utf-8 -*- import random import numpy as np", "# -*- coding: utf-8 -*- import random import numpy as", "j:j+size] = 0 return batch else: return batch return fc", "random.random() < p: return img[..., ::-1, :] else: return img", "def horizontal_flip(p=0.5): def fc(img): if random.random() < p: return img[...,", "return img return fc def vertical_flip(p=0.5): def fc(img): if random.random()", "aug def __call__(self, img): return self.augment(img) ########## # Images #", "augment(self, img): aug = img.copy() for op in self.ops: aug", "* (batch.shape[-1]-size)) batch[..., j:j+size] = 0 return batch else: return", "p: j = int(random.random() * (img.shape[1]-size)) img[..., j:j+size] = 0", "return img return fc def gaussian_noise(p=0.5, mean=0, sigma=0.02): def fc(img):", "return img return fc def default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default", "return fc def gaussian_noise(p=0.5, mean=0, sigma=0.02): def fc(img): if random.random()", "with horizontal flip, vertical flip, gaussian noise, black hstripe, and", "add(self, op): self.ops.append(op) def augment(self, img): aug = img.copy() for", "return img else: return img return fc def black_hstripe(p=0.5, size=10):", "op in self.ops: aug = op(aug) return aug def __call__(self,", "in self.ops: aug = op(aug) return aug def __call__(self, img):", "return fc def black_vstripe(p=0.5, size=10): def fc(img): if random.random() <", "Returns: Augmenter object. 
Use as: aug.augment(img) \"\"\" print(\"Using default image", "j:j+size, :] = 0 return batch else: return batch return", "if random.random() < p: return img[..., ::-1] else: return img", "isinstance(ops, list): print(\"Error: ops must be a list of functions\")", "list of functions\") quit() self.ops = ops def add(self, op):", "as: aug.augment(img) \"\"\" print(\"Using default image augmenter\") return Augmenter([ horizontal_flip(p),", "= int(random.random() * (batch.shape[-2]-size)) batch[..., j:j+size, :] = 0 return", "fc(img): if random.random() < p: return img[..., ::-1] else: return", "< p: j = int(random.random() * (img.shape[1]-size)) img[..., j:j+size] =", "if random.random() < p: j = int(random.random() * (img.shape[1]-size)) img[...,", "fc def black_vstripe_vid(p=0.5, size=10): def fc(batch): if random.random() < p:", "flip, gaussian noise, black hstripe, and black vstripe. Returns: Augmenter", "img return fc def black_vstripe(p=0.5, size=10): def fc(img): if random.random()", "j = int(random.random() * (batch.shape[-1]-size)) batch[..., j:j+size] = 0 return", "return img return fc def black_hstripe(p=0.5, size=10): def fc(img): if", "# Videos # ########## def horizontal_flip_vid(p=0.5): def fc(vid): if random.random()", "if not isinstance(ops, list): print(\"Error: ops must be a list", "Augmenter: \"\"\" Generic data augmentation class with chained operations \"\"\"", "default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation with horizontal flip,", "of functions\") quit() self.ops = ops def add(self, op): self.ops.append(op)", "if random.random() < p: return img[..., ::-1, :] else: return", "return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean, sigma), black_hstripe_vid(p, size=strip_size), black_vstripe_vid(p, size=strip_size)", "j:j+size] = 0 return img else: return img return fc", "img return fc def vertical_flip(p=0.5): def fc(img): if random.random() <", "return batch else: return batch return fc 
def default_augmenter_vid(p=0.5, strip_size=3,", "black_hstripe(p=0.5, size=10): def fc(img): if random.random() < p: j =", "coding: utf-8 -*- import random import numpy as np #", "gauss else: return img return fc def black_vstripe(p=0.5, size=10): def", "img else: return img return fc def default_augmenter(p=0.5, strip_size=3, mean=0,", "int(random.random() * (img.shape[1]-size)) img[..., j:j+size] = 0 return img else:", "= np.random.normal(mean, sigma, img.shape).astype(np.float32) return img + gauss else: return", "horizontal_flip(p), gaussian_noise(p, mean, sigma), black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size) ]) ##########", "vertical_flip(p=0.5): def fc(img): if random.random() < p: return img[..., ::-1,", "horizontal_flip(p=0.5): def fc(img): if random.random() < p: return img[..., ::-1]", "fc(img): if random.random() < p: gauss = np.random.normal(mean, sigma, img.shape).astype(np.float32)", "return fc def vertical_flip(p=0.5): def fc(img): if random.random() < p:", "0 return batch else: return batch return fc def black_hstripe_vid(p=0.5,", "else: return vid return fc def black_vstripe_vid(p=0.5, size=10): def fc(batch):", "fc def black_vstripe(p=0.5, size=10): def fc(img): if random.random() < p:", "# Generic data augmentation class Augmenter: \"\"\" Generic data augmentation", "img + gauss else: return img return fc def black_vstripe(p=0.5,", "Generic data augmentation class Augmenter: \"\"\" Generic data augmentation class", "return img + gauss else: return img return fc def", "########## # Videos # ########## def horizontal_flip_vid(p=0.5): def fc(vid): if", "Videos # ########## def horizontal_flip_vid(p=0.5): def fc(vid): if random.random() <", "aug = op(aug) return aug def __call__(self, img): return self.augment(img)", "return vid return fc def black_vstripe_vid(p=0.5, size=10): def fc(batch): if", "black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size) ]) ########## # Videos # ##########", "chained operations 
\"\"\" def __init__(self, ops=[]): if not isinstance(ops, list):", "black_hstripe_vid(p=0.5, size=10): def fc(batch): if random.random() < p: j =", "Use as: aug.augment(img) \"\"\" return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean, sigma),", "black vstripe. Returns: Augmenter object. Use as: aug.augment(img) \"\"\" print(\"Using", "def vertical_flip(p=0.5): def fc(img): if random.random() < p: return img[...,", "def gaussian_noise(p=0.5, mean=0, sigma=0.02): def fc(img): if random.random() < p:", "horizontal_flip_vid(p=0.5): def fc(vid): if random.random() < p: return vid[..., ::-1]", "= 0 return batch else: return batch return fc def", "img return fc def default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data", "# ########## def horizontal_flip_vid(p=0.5): def fc(vid): if random.random() < p:", "p: j = int(random.random() * (batch.shape[-1]-size)) batch[..., j:j+size] = 0", "op): self.ops.append(op) def augment(self, img): aug = img.copy() for op", "def horizontal_flip_vid(p=0.5): def fc(vid): if random.random() < p: return vid[...,", "< p: j = int(random.random() * (img.shape[0]-size)) img[..., j:j+size, :]", "p: j = int(random.random() * (batch.shape[-2]-size)) batch[..., j:j+size, :] =", "size=10): def fc(img): if random.random() < p: j = int(random.random()", "# ########## def horizontal_flip(p=0.5): def fc(img): if random.random() < p:", "def fc(img): if random.random() < p: return img[..., ::-1] else:", "fc def default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation with", "random.random() < p: return vid[..., ::-1] else: return vid return", "\"\"\" Generic data augmentation class with chained operations \"\"\" def", "augmentation with horizontal flip, vertical flip, gaussian noise, black hstripe,", "data augmentation with horizontal flip, gaussian noise, black hstripe, and", "black hstripe, and black vstripe. Returns: Augmenter object. 
Use as:", "op(aug) return aug def __call__(self, img): return self.augment(img) ########## #", "p: return img[..., ::-1, :] else: return img return fc", "img): aug = img.copy() for op in self.ops: aug =", "def add(self, op): self.ops.append(op) def augment(self, img): aug = img.copy()", "return fc def black_hstripe_vid(p=0.5, size=10): def fc(batch): if random.random() <", "+ gauss else: return img return fc def black_vstripe(p=0.5, size=10):", "= img.copy() for op in self.ops: aug = op(aug) return", "j:j+size, :] = 0 return img else: return img return", "fc(img): if random.random() < p: j = int(random.random() * (img.shape[0]-size))", "mean=0, sigma=0.02): \"\"\"Default data augmentation with horizontal flip, vertical flip,", "def black_hstripe_vid(p=0.5, size=10): def fc(batch): if random.random() < p: j", "strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation with horizontal flip, gaussian", "python # -*- coding: utf-8 -*- import random import numpy", "numpy as np # Generic data augmentation class Augmenter: \"\"\"", "p: return img[..., ::-1] else: return img return fc def", "def augment(self, img): aug = img.copy() for op in self.ops:", "# Images # ########## def horizontal_flip(p=0.5): def fc(img): if random.random()", "fc def vertical_flip(p=0.5): def fc(img): if random.random() < p: return", "object. 
Use as: aug.augment(img) \"\"\" print(\"Using default image augmenter\") return", "def fc(img): if random.random() < p: return img[..., ::-1, :]", "data augmentation with horizontal flip, vertical flip, gaussian noise, black", "img.copy() for op in self.ops: aug = op(aug) return aug", "else: return img return fc def black_hstripe(p=0.5, size=10): def fc(img):", "image augmenter\") return Augmenter([ horizontal_flip(p), gaussian_noise(p, mean, sigma), black_hstripe(p, size=strip_size),", "batch[..., j:j+size, :] = 0 return batch else: return batch", "ops=[]): if not isinstance(ops, list): print(\"Error: ops must be a", "fc def black_hstripe(p=0.5, size=10): def fc(img): if random.random() < p:", "fc def black_hstripe_vid(p=0.5, size=10): def fc(batch): if random.random() < p:", "aug = img.copy() for op in self.ops: aug = op(aug)", "\"\"\"Default data augmentation with horizontal flip, vertical flip, gaussian noise,", "img[..., j:j+size, :] = 0 return img else: return img", "default image augmenter\") return Augmenter([ horizontal_flip(p), gaussian_noise(p, mean, sigma), black_hstripe(p,", "np # Generic data augmentation class Augmenter: \"\"\" Generic data", "hstripe, and black vstripe. Returns: Augmenter object. 
Use as: aug.augment(img)", "return fc def default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation", "batch return fc def black_hstripe_vid(p=0.5, size=10): def fc(batch): if random.random()", "as: aug.augment(img) \"\"\" return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean, sigma), black_hstripe_vid(p,", "Images # ########## def horizontal_flip(p=0.5): def fc(img): if random.random() <", "def default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation with horizontal", "else: return img return fc def gaussian_noise(p=0.5, mean=0, sigma=0.02): def", "img.shape).astype(np.float32) return img + gauss else: return img return fc", "random.random() < p: j = int(random.random() * (img.shape[1]-size)) img[..., j:j+size]", "size=strip_size) ]) ########## # Videos # ########## def horizontal_flip_vid(p=0.5): def", "Generic data augmentation class with chained operations \"\"\" def __init__(self,", "random.random() < p: j = int(random.random() * (batch.shape[-2]-size)) batch[..., j:j+size,", "(batch.shape[-1]-size)) batch[..., j:j+size] = 0 return batch else: return batch", "batch else: return batch return fc def default_augmenter_vid(p=0.5, strip_size=3, mean=0,", "flip, vertical flip, gaussian noise, black hstripe, and black vstripe.", "functions\") quit() self.ops = ops def add(self, op): self.ops.append(op) def", "size=10): def fc(batch): if random.random() < p: j = int(random.random()", "int(random.random() * (batch.shape[-2]-size)) batch[..., j:j+size, :] = 0 return batch", "img else: return img return fc def black_hstripe(p=0.5, size=10): def", "Returns: Augmenter object. 
Use as: aug.augment(img) \"\"\" return Augmenter([ horizontal_flip_vid(p),", "mean=0, sigma=0.02): \"\"\"Default data augmentation with horizontal flip, gaussian noise,", ":] = 0 return batch else: return batch return fc", "import numpy as np # Generic data augmentation class Augmenter:", "else: return batch return fc def default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02):", "########## # Images # ########## def horizontal_flip(p=0.5): def fc(img): if", "augmentation class with chained operations \"\"\" def __init__(self, ops=[]): if", "size=strip_size), black_vstripe(p, size=strip_size) ]) ########## # Videos # ########## def", "black_vstripe(p, size=strip_size) ]) ########## # Videos # ########## def horizontal_flip_vid(p=0.5):", "black vstripe. Returns: Augmenter object. Use as: aug.augment(img) \"\"\" return", "img): return self.augment(img) ########## # Images # ########## def horizontal_flip(p=0.5):", "def black_vstripe_vid(p=0.5, size=10): def fc(batch): if random.random() < p: j", "fc def default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation with", "random.random() < p: gauss = np.random.normal(mean, sigma, img.shape).astype(np.float32) return img", ":] = 0 return img else: return img return fc", "= int(random.random() * (batch.shape[-1]-size)) batch[..., j:j+size] = 0 return batch", "self.augment(img) ########## # Images # ########## def horizontal_flip(p=0.5): def fc(img):", "if random.random() < p: j = int(random.random() * (batch.shape[-2]-size)) batch[...,", "__init__(self, ops=[]): if not isinstance(ops, list): print(\"Error: ops must be", "list): print(\"Error: ops must be a list of functions\") quit()", "and black vstripe. Returns: Augmenter object. 
Use as: aug.augment(img) \"\"\"", "::-1, :] else: return img return fc def gaussian_noise(p=0.5, mean=0,", "return fc def black_vstripe_vid(p=0.5, size=10): def fc(batch): if random.random() <", "def fc(img): if random.random() < p: j = int(random.random() *", "0 return img else: return img return fc def default_augmenter(p=0.5,", "data augmentation class with chained operations \"\"\" def __init__(self, ops=[]):", "* (img.shape[0]-size)) img[..., j:j+size, :] = 0 return img else:", "< p: return img[..., ::-1] else: return img return fc", "return img else: return img return fc def default_augmenter(p=0.5, strip_size=3,", "operations \"\"\" def __init__(self, ops=[]): if not isinstance(ops, list): print(\"Error:", "return img[..., ::-1, :] else: return img return fc def", "random.random() < p: j = int(random.random() * (batch.shape[-1]-size)) batch[..., j:j+size]", "########## def horizontal_flip_vid(p=0.5): def fc(vid): if random.random() < p: return", "if random.random() < p: j = int(random.random() * (img.shape[0]-size)) img[...,", "sigma=0.02): def fc(img): if random.random() < p: gauss = np.random.normal(mean,", "< p: j = int(random.random() * (batch.shape[-2]-size)) batch[..., j:j+size, :]", "p: gauss = np.random.normal(mean, sigma, img.shape).astype(np.float32) return img + gauss", "j = int(random.random() * (img.shape[0]-size)) img[..., j:j+size, :] = 0", "return batch return fc def default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default", "gaussian noise, black hstripe, and black vstripe. Returns: Augmenter object.", "Augmenter object. 
Use as: aug.augment(img) \"\"\" print(\"Using default image augmenter\")", "fc(img): if random.random() < p: j = int(random.random() * (img.shape[1]-size))", "* (batch.shape[-2]-size)) batch[..., j:j+size, :] = 0 return batch else:", "else: return img return fc def black_vstripe(p=0.5, size=10): def fc(img):", "black_vstripe_vid(p=0.5, size=10): def fc(batch): if random.random() < p: j =", "strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation with horizontal flip, vertical", "return img return fc def black_vstripe(p=0.5, size=10): def fc(img): if", "def fc(vid): if random.random() < p: return vid[..., ::-1] else:", "= 0 return img else: return img return fc def", "batch[..., j:j+size] = 0 return batch else: return batch return", "(batch.shape[-2]-size)) batch[..., j:j+size, :] = 0 return batch else: return", "(img.shape[1]-size)) img[..., j:j+size] = 0 return img else: return img", "noise, black hstripe, and black vstripe. Returns: Augmenter object. Use", "import random import numpy as np # Generic data augmentation", "= int(random.random() * (img.shape[0]-size)) img[..., j:j+size, :] = 0 return", "return fc def black_hstripe(p=0.5, size=10): def fc(img): if random.random() <", "gauss = np.random.normal(mean, sigma, img.shape).astype(np.float32) return img + gauss else:", "fc(batch): if random.random() < p: j = int(random.random() * (batch.shape[-2]-size))", "p: j = int(random.random() * (img.shape[0]-size)) img[..., j:j+size, :] =", "as np # Generic data augmentation class Augmenter: \"\"\" Generic", "print(\"Using default image augmenter\") return Augmenter([ horizontal_flip(p), gaussian_noise(p, mean, sigma),", "return Augmenter([ horizontal_flip(p), gaussian_noise(p, mean, sigma), black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size)", "random import numpy as np # Generic data augmentation class", "gaussian_noise(p=0.5, mean=0, sigma=0.02): def fc(img): if random.random() < p: gauss", "self.ops.append(op) def augment(self, img): aug 
= img.copy() for op in", "random.random() < p: j = int(random.random() * (img.shape[0]-size)) img[..., j:j+size,", "img[..., ::-1] else: return img return fc def vertical_flip(p=0.5): def", "Augmenter([ horizontal_flip(p), gaussian_noise(p, mean, sigma), black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size) ])", "random.random() < p: return img[..., ::-1] else: return img return", "\"\"\" print(\"Using default image augmenter\") return Augmenter([ horizontal_flip(p), gaussian_noise(p, mean,", "= op(aug) return aug def __call__(self, img): return self.augment(img) ##########", "j = int(random.random() * (img.shape[1]-size)) img[..., j:j+size] = 0 return", "def __init__(self, ops=[]): if not isinstance(ops, list): print(\"Error: ops must", "\"\"\" def __init__(self, ops=[]): if not isinstance(ops, list): print(\"Error: ops", "def black_hstripe(p=0.5, size=10): def fc(img): if random.random() < p: j", "return batch return fc def black_hstripe_vid(p=0.5, size=10): def fc(batch): if", "Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean, sigma), black_hstripe_vid(p, size=strip_size), black_vstripe_vid(p, size=strip_size) ])", "mean=0, sigma=0.02): def fc(img): if random.random() < p: gauss =", "* (img.shape[1]-size)) img[..., j:j+size] = 0 return img else: return", "= int(random.random() * (img.shape[1]-size)) img[..., j:j+size] = 0 return img", "return img[..., ::-1] else: return img return fc def vertical_flip(p=0.5):", "must be a list of functions\") quit() self.ops = ops", "return batch else: return batch return fc def black_hstripe_vid(p=0.5, size=10):", "with horizontal flip, gaussian noise, black hstripe, and black vstripe.", "augmentation with horizontal flip, gaussian noise, black hstripe, and black", "black_vstripe(p=0.5, size=10): def fc(img): if random.random() < p: j =", "quit() self.ops = ops def add(self, op): self.ops.append(op) def augment(self,", ":] else: return img return fc def gaussian_noise(p=0.5, mean=0, sigma=0.02):", 
"horizontal flip, gaussian noise, black hstripe, and black vstripe. Returns:", "(img.shape[0]-size)) img[..., j:j+size, :] = 0 return img else: return", "self.ops = ops def add(self, op): self.ops.append(op) def augment(self, img):", "def __call__(self, img): return self.augment(img) ########## # Images # ##########", "utf-8 -*- import random import numpy as np # Generic", "batch return fc def default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data", "int(random.random() * (img.shape[0]-size)) img[..., j:j+size, :] = 0 return img", "sigma=0.02): \"\"\"Default data augmentation with horizontal flip, gaussian noise, black", "= ops def add(self, op): self.ops.append(op) def augment(self, img): aug", "return self.augment(img) ########## # Images # ########## def horizontal_flip(p=0.5): def", "def fc(img): if random.random() < p: gauss = np.random.normal(mean, sigma,", "data augmentation class Augmenter: \"\"\" Generic data augmentation class with", "def default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation with horizontal", "sigma), black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size) ]) ########## # Videos #", "sigma=0.02): \"\"\"Default data augmentation with horizontal flip, vertical flip, gaussian", "fc(batch): if random.random() < p: j = int(random.random() * (batch.shape[-1]-size))", "gaussian_noise(p, mean, sigma), black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size) ]) ########## #", "img return fc def black_hstripe(p=0.5, size=10): def fc(img): if random.random()", "else: return img return fc def vertical_flip(p=0.5): def fc(img): if", "sigma, img.shape).astype(np.float32) return img + gauss else: return img return", "augmentation class Augmenter: \"\"\" Generic data augmentation class with chained", "int(random.random() * (batch.shape[-1]-size)) batch[..., j:j+size] = 0 return batch else:", "\"\"\" return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean, 
sigma), black_hstripe_vid(p, size=strip_size), black_vstripe_vid(p,", "class with chained operations \"\"\" def __init__(self, ops=[]): if not", "0 return img else: return img return fc def black_hstripe(p=0.5,", "return vid[..., ::-1] else: return vid return fc def black_vstripe_vid(p=0.5,", "vstripe. Returns: Augmenter object. Use as: aug.augment(img) \"\"\" return Augmenter([", "__call__(self, img): return self.augment(img) ########## # Images # ########## def", "else: return img return fc def default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02):", "augmenter\") return Augmenter([ horizontal_flip(p), gaussian_noise(p, mean, sigma), black_hstripe(p, size=strip_size), black_vstripe(p,", "if random.random() < p: gauss = np.random.normal(mean, sigma, img.shape).astype(np.float32) return", "Use as: aug.augment(img) \"\"\" print(\"Using default image augmenter\") return Augmenter([", "else: return batch return fc def black_hstripe_vid(p=0.5, size=10): def fc(batch):", "default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation with horizontal flip,", "########## def horizontal_flip(p=0.5): def fc(img): if random.random() < p: return", "return fc def default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02): \"\"\"Default data augmentation", "return aug def __call__(self, img): return self.augment(img) ########## # Images", "< p: return vid[..., ::-1] else: return vid return fc", "j = int(random.random() * (batch.shape[-2]-size)) batch[..., j:j+size, :] = 0", "np.random.normal(mean, sigma, img.shape).astype(np.float32) return img + gauss else: return img", "img[..., ::-1, :] else: return img return fc def gaussian_noise(p=0.5,", "be a list of functions\") quit() self.ops = ops def", "mean, sigma), black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size) ]) ########## # Videos", "with chained operations \"\"\" def __init__(self, ops=[]): if not isinstance(ops,", "def black_vstripe(p=0.5, size=10): def fc(img): if 
random.random() < p: j", "print(\"Error: ops must be a list of functions\") quit() self.ops", "if random.random() < p: j = int(random.random() * (batch.shape[-1]-size)) batch[...,", "not isinstance(ops, list): print(\"Error: ops must be a list of", "img return fc def gaussian_noise(p=0.5, mean=0, sigma=0.02): def fc(img): if", "for op in self.ops: aug = op(aug) return aug def", "if random.random() < p: return vid[..., ::-1] else: return vid", "]) ########## # Videos # ########## def horizontal_flip_vid(p=0.5): def fc(vid):", "a list of functions\") quit() self.ops = ops def add(self,", "< p: gauss = np.random.normal(mean, sigma, img.shape).astype(np.float32) return img +", "\"\"\"Default data augmentation with horizontal flip, gaussian noise, black hstripe,", "batch else: return batch return fc def black_hstripe_vid(p=0.5, size=10): def", "vertical flip, gaussian noise, black hstripe, and black vstripe. Returns:", "def fc(batch): if random.random() < p: j = int(random.random() *" ]
[ "geom = obj.location.geometry.centroid elif hasattr(obj, \"geom\"): geom = obj.geom elif", "NUTS3 (kraje) and LAU1 (okresy) region according to name \"\"\"", "#print(feature) data[\"features\"].append(feature) return data # Register your models here. admin.site.register(Lau1,", "= 6430000 #readonly_fields = (\"code\", \"name\",) class NUTS3Admin(LeafletGeoAdmin): default_zoom =", "\"name\",) class RailwayStationAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat", "= Lau1 class NUTS3Filter(admin.SimpleListFilter): \"\"\"Filter for admin interface of NUTS3", "class LAU1Admin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat =", "class RailwayStationAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat =", "len(queryset): result = self._search_lay1_nuts3_by_name( queryset, search_term) if len(result) == 0", "= [float(x) for x in search_term.split(\"<>\")] filtered = queryset.filter( areal__area__total__gte=area_min,", "add GeoJSON encoded data for the queryset \"\"\" extra_context =", "encoded data for the queryset \"\"\" extra_context = extra_context or", "None if hasattr(obj, \"location_set\"): multipoint = geos.MultiPoint( [loc.address.coordinates for loc", "response.context_data.update(extra_context) return response def as_geojson(self, queryset): if self.geojson_attributes: attributes =", "= 1730000 default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class", "} for attribute in attributes: if hasattr(obj, attribute): value =", "import Lau1 from .models import Nuts3 from .models import Airport", "(\"code\", \"name\",) class LAU1Admin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000", "from django.urls import reverse from django.utils.translation import ugettext_lazy as _", "= None if hasattr(obj, \"location_set\"): multipoint = geos.MultiPoint( [loc.address.coordinates for", "hasattr(obj, \"geom\"): geom = obj.geom elif hasattr(obj, \"address\"): geom =", "filtered = 
queryset.filter( areal__area__total__gte=area_min, areal__area__total__lte=area_max) return filtered def changelist_view(self, request,", "return filtered def changelist_view(self, request, extra_context=None): \"\"\"Adjust change list view", "leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin from .models import Lau1 from .models", "(kraje) and LAU1 (okresy) region according to name \"\"\" filtered", "\"name\"): title = obj.name if type(obj.pk) == uuid.UUID: id =", "search_term.split(\"<>\")] filtered = queryset.filter( areal__area__total__gte=area_min, areal__area__total__lte=area_max) return filtered def changelist_view(self,", "from .models import Nuts3 from .models import Airport from .models", "queryset.none() for cls in (Lau1, Nuts3): objs = cls.objects.filter(name__startswith=search_term) for", "= super().changelist_view( request, extra_context=extra_context, ) if hasattr(response, \"context_data\"): filtered_query_set =", "\"location_set\"): multipoint = geos.MultiPoint( [loc.address.coordinates for loc in obj.location_set.all()]) geom", "geos.MultiPoint( [loc.address.coordinates for loc in obj.location_set.all()]) geom = multipoint.centroid elif", "if type(obj.pk) == uuid.UUID: id = str(obj.pk) else: id =", "class NUTS3Filter(admin.SimpleListFilter): \"\"\"Filter for admin interface of NUTS3 regions (Kraje)", "id = str(obj.pk) else: id = obj.pk feature = {", "admin.site.register(Lau1, LAU1Admin) admin.site.register(Nuts3, NUTS3Admin) admin.site.register(Road, RoadAdmin) admin.site.register(PublicTransportStop, PublicTransportStopAdmin) admin.site.register(RailwayStation, RailwayStationAdmin)", "title = _('NUTS3 regions') parameter_name = 'nuts3#' def lookups(self, request,", "return obj.location.address.city else: return \", \".join( [l.__str__() for l in", "feature[attribute] = str(value) else: feature[attribute] = value #print(feature) data[\"features\"].append(feature) return", "import json class AirportAdmin(LeafletGeoAdmin): default_zoom = 7 
default_lon = 1730000", "= obj.name if type(obj.pk) == uuid.UUID: id = str(obj.pk) else:", "area_max = [float(x) for x in search_term.split(\"<>\")] filtered = queryset.filter(", "def as_geojson(self, queryset): if self.geojson_attributes: attributes = self.geojson_attributes else: attributes", "\"\"\"Adjust change list view add GeoJSON encoded data for the", "|= objects return filtered def _search_area(self, queryset, search_term): \"\"\"Search all", "} for obj in queryset: geom = None if hasattr(obj,", "self._search_lay1_nuts3_by_name( queryset, search_term) if len(result) == 0 or len(result) ==", "self.geojson_attributes: attributes = self.geojson_attributes else: attributes = [] data =", "if hasattr(obj, attribute): value = getattr(obj, attribute.__str__()) if type(value) ==", "return response def as_geojson(self, queryset): if self.geojson_attributes: attributes = self.geojson_attributes", "\"id\": id } for attribute in attributes: if hasattr(obj, attribute):", "import Nuts3 from .models import Airport from .models import Road", "= (\"code\", \"name\",) class LAU1Admin(LeafletGeoAdmin): default_zoom = 7 default_lon =", "MIN < area.total < MAX \"\"\" filtered = queryset.none() if", "6430000 #readonly_fields = (\"code\", \"name\",) class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model =", "def changelist_view(self, request, extra_context=None): \"\"\"Adjust change list view add GeoJSON", "uuid.UUID: feature[attribute] = str(value) else: feature[attribute] = value #print(feature) data[\"features\"].append(feature)", "use_distinct = super( ArealFieldAdmin, self).get_search_results( request, queryset, search_term) if search_term:", "self._search_area(queryset, search_term) return (result, use_distinct) def _search_lay1_nuts3_by_name(self, queryset, search_term): \"\"\"Search", "cls.objects.filter(name__startswith=search_term) for o in objs: objects = queryset.filter( location__geometry__intersects=o.geometry) filtered", "geojson_attributes = [] 
def get_place(self, obj): if hasattr(obj.location, \"address\") and", "val = self.value() if val: nuts3 = Nuts3.objects.get(pk=val) results =", "in search_term.split(\"<>\")] filtered = queryset.filter( areal__area__total__gte=area_min, areal__area__total__lte=area_max) return filtered def", "get_place(self, obj): if hasattr(obj.location, \"address\") and \\ obj.location.address is not", "Nuts3 from .models import Airport from .models import Road from", "\"type\": \"Feature\", \"properties\": { \"name\": title, \"object_url\": reverse('admin:{}_{}_change'.format( obj._meta.app_label, obj._meta.model_name),", "from .models import Airport from .models import Road from .models", "= super( ArealFieldAdmin, self).get_search_results( request, queryset, search_term) if search_term: if", "= extra_context or {} response = super().changelist_view( request, extra_context=extra_context, )", "attribute): value = getattr(obj, attribute.__str__()) if type(value) == uuid.UUID: feature[attribute]", "self.value() if val: nuts3 = Nuts3.objects.get(pk=val) results = queryset.filter( location__geometry__intersects=nuts3.geometry)", "hasattr(obj, \"address\"): geom = obj.address.coordinates if geom: title = None", "admin.site.register(Nuts3, NUTS3Admin) admin.site.register(Road, RoadAdmin) admin.site.register(PublicTransportStop, PublicTransportStopAdmin) admin.site.register(RailwayStation, RailwayStationAdmin) admin.site.register(Airport, AirportAdmin)", "def queryset(self, request, queryset): val = self.value() if val: nuts3", "Nuts3.objects.all() return ( (obj.id, obj.name) for obj in nuts3 )", "queryset return results class ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes = [] def get_place(self,", "super().changelist_view( request, extra_context=extra_context, ) if hasattr(response, \"context_data\"): filtered_query_set = response.context_data[\"cl\"].queryset", "= self.value() if val: nuts3 = Nuts3.objects.get(pk=val) results = queryset.filter(", "1730000 
default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class NUTS3Admin(LeafletGeoAdmin):", "if self.geojson_attributes: attributes = self.geojson_attributes else: attributes = [] data", "list view add GeoJSON encoded data for the queryset \"\"\"", "default_lon = 1730000 default_lat = 6430000 #readonly_fields = (\"code\", \"name\",)", "django.contrib.gis import geos from leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin from .models", "(\"code\", \"name\",) class NUTS3Admin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000", "\"type\": \"FeatureCollection\", \"features\": [] } for obj in queryset: geom", "from .models import Road from .models import PublicTransportStop from .models", "for cls in (Lau1, Nuts3): objs = cls.objects.filter(name__startswith=search_term) for o", "LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Lau1 class NUTS3Filter(admin.SimpleListFilter): \"\"\"Filter for admin", "all features, where MIN < area.total < MAX \"\"\" filtered", "json class AirportAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat", "ugettext_lazy as _ import nested_admin import uuid import json class", "}, \"geometry\": json.loads(geom.json), \"id\": id } for attribute in attributes:", "return results class ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes = [] def get_place(self, obj):", "return \", \".join( [l.__str__() for l in Nuts3.objects.filter( geometry__intersects=obj.location.geometry)]) def", "NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Nuts3 class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model =", "(Kraje) \"\"\" title = _('NUTS3 regions') parameter_name = 'nuts3#' def", "NUTS3 (by name) search and area size search (using `<>`", "from .models import RailwayStation from django.urls import reverse from django.utils.translation", "search (using `<>` operator) \"\"\" result, use_distinct = super( ArealFieldAdmin,", ") if hasattr(response, 
\"context_data\"): filtered_query_set = response.context_data[\"cl\"].queryset extra_context['objects_data'] = \\", "= { \"type\": \"Feature\", \"properties\": { \"name\": title, \"object_url\": reverse('admin:{}_{}_change'.format(", "[] data = { \"type\": \"FeatureCollection\", \"features\": [] } for", "< area.total < MAX \"\"\" filtered = queryset.none() if search_term.find(\"<>\")", "loc in obj.location_set.all()]) geom = multipoint.centroid elif hasattr(obj, \"location\"): geom", "= queryset.filter( areal__area__total__gte=area_min, areal__area__total__lte=area_max) return filtered def changelist_view(self, request, extra_context=None):", "nuts3 = Nuts3.objects.get(pk=val) results = queryset.filter( location__geometry__intersects=nuts3.geometry) else: results =", "where MIN < area.total < MAX \"\"\" filtered = queryset.none()", "class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Lau1 class NUTS3Filter(admin.SimpleListFilter): \"\"\"Filter for", "from .models import Lau1 from .models import Nuts3 from .models", "(result, use_distinct) def _search_lay1_nuts3_by_name(self, queryset, search_term): \"\"\"Search NUTS3 (kraje) and", "6430000 #readonly_fields = (\"code\", \"name\",) class RailwayStationAdmin(LeafletGeoAdmin): default_zoom = 7", "for admin interface of NUTS3 regions (Kraje) \"\"\" title =", "= queryset.none() if search_term.find(\"<>\") > -1: area_min, area_max = [float(x)", "len(queryset): result = self._search_area(queryset, search_term) return (result, use_distinct) def _search_lay1_nuts3_by_name(self,", "reverse from django.utils.translation import ugettext_lazy as _ import nested_admin import", "def _search_lay1_nuts3_by_name(self, queryset, search_term): \"\"\"Search NUTS3 (kraje) and LAU1 (okresy)", "else: id = obj.pk feature = { \"type\": \"Feature\", \"properties\":", "queryset(self, request, queryset): val = self.value() if val: nuts3 =", "data for the queryset \"\"\" extra_context = extra_context or {}", "= queryset 
return results class ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes = [] def", "geos from leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin from .models import Lau1", "'nuts3#' def lookups(self, request, model_admin): nuts3 = Nuts3.objects.all() return (", "queryset): val = self.value() if val: nuts3 = Nuts3.objects.get(pk=val) results", "if hasattr(obj, \"location_set\"): multipoint = geos.MultiPoint( [loc.address.coordinates for loc in", "default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class LAU1Admin(LeafletGeoAdmin): default_zoom", ") def queryset(self, request, queryset): val = self.value() if val:", "search_term): \"\"\"Search NUTS3 (kraje) and LAU1 (okresy) region according to", ".models import PublicTransportStop from .models import RailwayStation from django.urls import", "l in Nuts3.objects.filter( geometry__intersects=obj.location.geometry)]) def get_search_results(self, request, queryset, search_term): \"\"\"Add", "data = { \"type\": \"FeatureCollection\", \"features\": [] } for obj", "filtered_query_set = response.context_data[\"cl\"].queryset extra_context['objects_data'] = \\ json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context) return response", "(by name) search and area size search (using `<>` operator)", "= (\"code\", \"name\",) class PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon =", "hasattr(obj, \"location_set\"): multipoint = geos.MultiPoint( [loc.address.coordinates for loc in obj.location_set.all()])", "model = Lau1 class NUTS3Filter(admin.SimpleListFilter): \"\"\"Filter for admin interface of", "attribute in attributes: if hasattr(obj, attribute): value = getattr(obj, attribute.__str__())", "1730000 default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class RailwayStationAdmin(LeafletGeoAdmin):", "1730000 default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class PublicTransportStopAdmin(LeafletGeoAdmin):", "elif 
hasattr(obj, \"location\"): geom = obj.location.geometry.centroid elif hasattr(obj, \"geom\"): geom", "`<>` operator) \"\"\" result, use_distinct = super( ArealFieldAdmin, self).get_search_results( request,", "= (\"code\", \"name\",) class NUTS3Admin(LeafletGeoAdmin): default_zoom = 7 default_lon =", "class NUTS3Admin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat =", "Nuts3): objs = cls.objects.filter(name__startswith=search_term) for o in objs: objects =", "args=(obj.pk,)), }, \"geometry\": json.loads(geom.json), \"id\": id } for attribute in", "= obj.pk feature = { \"type\": \"Feature\", \"properties\": { \"name\":", "[] } for obj in queryset: geom = None if", "objects return filtered def _search_area(self, queryset, search_term): \"\"\"Search all features,", "in nuts3 ) def queryset(self, request, queryset): val = self.value()", "default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class NUTS3Admin(LeafletGeoAdmin): default_zoom", "as _ import nested_admin import uuid import json class AirportAdmin(LeafletGeoAdmin):", "= multipoint.centroid elif hasattr(obj, \"location\"): geom = obj.location.geometry.centroid elif hasattr(obj,", "= (\"code\", \"name\",) class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Nuts3 class", ".models import RailwayStation from django.urls import reverse from django.utils.translation import", "= None if hasattr(obj, \"title\"): title = obj.title elif hasattr(obj,", "id = obj.pk feature = { \"type\": \"Feature\", \"properties\": {", "#readonly_fields = (\"code\", \"name\",) class NUTS3Admin(LeafletGeoAdmin): default_zoom = 7 default_lon", "if val: nuts3 = Nuts3.objects.get(pk=val) results = queryset.filter( location__geometry__intersects=nuts3.geometry) else:", "areal__area__total__gte=area_min, areal__area__total__lte=area_max) return filtered def changelist_view(self, request, extra_context=None): \"\"\"Adjust change", "\"properties\": { \"name\": title, \"object_url\": 
reverse('admin:{}_{}_change'.format( obj._meta.app_label, obj._meta.model_name), args=(obj.pk,)), },", "not None: return obj.location.address.city else: return \", \".join( [l.__str__() for", "\\ obj.location.address is not None: return obj.location.address.city else: return \",", "elif hasattr(obj, \"geom\"): geom = obj.geom elif hasattr(obj, \"address\"): geom", "{ \"name\": title, \"object_url\": reverse('admin:{}_{}_change'.format( obj._meta.app_label, obj._meta.model_name), args=(obj.pk,)), }, \"geometry\":", "default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class RoadAdmin(LeafletGeoAdmin): default_zoom", "LAU1 (okresy) region according to name \"\"\" filtered = queryset.none()", "import nested_admin import uuid import json class AirportAdmin(LeafletGeoAdmin): default_zoom =", "\"\"\"Search NUTS3 (kraje) and LAU1 (okresy) region according to name", "multipoint.centroid elif hasattr(obj, \"location\"): geom = obj.location.geometry.centroid elif hasattr(obj, \"geom\"):", "queryset.filter( location__geometry__intersects=o.geometry) filtered |= objects return filtered def _search_area(self, queryset,", "area.total < MAX \"\"\" filtered = queryset.none() if search_term.find(\"<>\") >", "return filtered def _search_area(self, queryset, search_term): \"\"\"Search all features, where", "== uuid.UUID: id = str(obj.pk) else: id = obj.pk feature", "(\"code\", \"name\",) class RailwayStationAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000", "LAU1Admin) admin.site.register(Nuts3, NUTS3Admin) admin.site.register(Road, RoadAdmin) admin.site.register(PublicTransportStop, PublicTransportStopAdmin) admin.site.register(RailwayStation, RailwayStationAdmin) admin.site.register(Airport,", ".models import Nuts3 from .models import Airport from .models import", "[loc.address.coordinates for loc in obj.location_set.all()]) geom = multipoint.centroid elif hasattr(obj,", "PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 
default_lat = 6430000", "6430000 #readonly_fields = (\"code\", \"name\",) class LAU1Admin(LeafletGeoAdmin): default_zoom = 7", "# Register your models here. admin.site.register(Lau1, LAU1Admin) admin.site.register(Nuts3, NUTS3Admin) admin.site.register(Road,", ".models import Road from .models import PublicTransportStop from .models import", "#readonly_fields = (\"code\", \"name\",) class LAU1Admin(LeafletGeoAdmin): default_zoom = 7 default_lon", "request, queryset, search_term) if search_term: if len(result) == 0 or", "extra_context['objects_data'] = \\ json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context) return response def as_geojson(self, queryset):", "== len(queryset): result = self._search_area(queryset, search_term) return (result, use_distinct) def", "= self.geojson_attributes else: attributes = [] data = { \"type\":", "\"title\"): title = obj.title elif hasattr(obj, \"name\"): title = obj.name", ".models import Airport from .models import Road from .models import", "of NUTS3 regions (Kraje) \"\"\" title = _('NUTS3 regions') parameter_name", "RoadAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat = 6430000", "from .models import PublicTransportStop from .models import RailwayStation from django.urls", "o in objs: objects = queryset.filter( location__geometry__intersects=o.geometry) filtered |= objects", "\"context_data\"): filtered_query_set = response.context_data[\"cl\"].queryset extra_context['objects_data'] = \\ json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context) return", "response def as_geojson(self, queryset): if self.geojson_attributes: attributes = self.geojson_attributes else:", "search_term) return (result, use_distinct) def _search_lay1_nuts3_by_name(self, queryset, search_term): \"\"\"Search NUTS3", "Nuts3.objects.get(pk=val) results = queryset.filter( location__geometry__intersects=nuts3.geometry) else: results = queryset return", "= 
str(value) else: feature[attribute] = value #print(feature) data[\"features\"].append(feature) return data", "( (obj.id, obj.name) for obj in nuts3 ) def queryset(self,", "hasattr(obj.location, \"address\") and \\ obj.location.address is not None: return obj.location.address.city", "geom = None if hasattr(obj, \"location_set\"): multipoint = geos.MultiPoint( [loc.address.coordinates", "queryset, search_term) if search_term: if len(result) == 0 or len(result)", "obj.pk feature = { \"type\": \"Feature\", \"properties\": { \"name\": title,", "= self._search_lay1_nuts3_by_name( queryset, search_term) if len(result) == 0 or len(result)", "nested_admin import uuid import json class AirportAdmin(LeafletGeoAdmin): default_zoom = 7", "\"\"\" title = _('NUTS3 regions') parameter_name = 'nuts3#' def lookups(self,", "getattr(obj, attribute.__str__()) if type(value) == uuid.UUID: feature[attribute] = str(value) else:", "\"\"\" filtered = queryset.none() if search_term.find(\"<>\") > -1: area_min, area_max", "\"\"\"Search all features, where MIN < area.total < MAX \"\"\"", "to name \"\"\" filtered = queryset.none() for cls in (Lau1,", "== 0 or len(result) == len(queryset): result = self._search_lay1_nuts3_by_name( queryset,", "\", \".join( [l.__str__() for l in Nuts3.objects.filter( geometry__intersects=obj.location.geometry)]) def get_search_results(self,", "ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes = [] def get_place(self, obj): if hasattr(obj.location, \"address\")", "if search_term: if len(result) == 0 or len(result) == len(queryset):", "Lau1 from .models import Nuts3 from .models import Airport from", "for obj in nuts3 ) def queryset(self, request, queryset): val", "<reponame>CzechInvest/ciis from django.contrib import admin from django.contrib.gis import geos from", "search_term: if len(result) == 0 or len(result) == len(queryset): result", "your models here. 
admin.site.register(Lau1, LAU1Admin) admin.site.register(Nuts3, NUTS3Admin) admin.site.register(Road, RoadAdmin) admin.site.register(PublicTransportStop,", "area size search (using `<>` operator) \"\"\" result, use_distinct =", "(using `<>` operator) \"\"\" result, use_distinct = super( ArealFieldAdmin, self).get_search_results(", "use_distinct) def _search_lay1_nuts3_by_name(self, queryset, search_term): \"\"\"Search NUTS3 (kraje) and LAU1", "admin interface of NUTS3 regions (Kraje) \"\"\" title = _('NUTS3", "str(obj.pk) else: id = obj.pk feature = { \"type\": \"Feature\",", "= geos.MultiPoint( [loc.address.coordinates for loc in obj.location_set.all()]) geom = multipoint.centroid", "result = self._search_lay1_nuts3_by_name( queryset, search_term) if len(result) == 0 or", "= (\"code\", \"name\",) class RoadAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon =", "import admin from django.contrib.gis import geos from leaflet.admin import LeafletGeoAdmin,", "request, model_admin): nuts3 = Nuts3.objects.all() return ( (obj.id, obj.name) for", "import uuid import json class AirportAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon", "regions (Kraje) \"\"\" title = _('NUTS3 regions') parameter_name = 'nuts3#'", "[float(x) for x in search_term.split(\"<>\")] filtered = queryset.filter( areal__area__total__gte=area_min, areal__area__total__lte=area_max)", "obj.location.address is not None: return obj.location.address.city else: return \", \".join(", "area_min, area_max = [float(x) for x in search_term.split(\"<>\")] filtered =", "6430000 #readonly_fields = (\"code\", \"name\",) class PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom = 7", "from django.utils.translation import ugettext_lazy as _ import nested_admin import uuid", "= { \"type\": \"FeatureCollection\", \"features\": [] } for obj in", "else: return \", \".join( [l.__str__() for l in Nuts3.objects.filter( geometry__intersects=obj.location.geometry)])", "extra_context or {} response = 
super().changelist_view( request, extra_context=extra_context, ) if", "in queryset: geom = None if hasattr(obj, \"location_set\"): multipoint =", "1730000 default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class LAU1Admin(LeafletGeoAdmin):", "hasattr(obj, \"location\"): geom = obj.location.geometry.centroid elif hasattr(obj, \"geom\"): geom =", "search_term): \"\"\"Add NUTS3 (by name) search and area size search", "regions') parameter_name = 'nuts3#' def lookups(self, request, model_admin): nuts3 =", "obj.location_set.all()]) geom = multipoint.centroid elif hasattr(obj, \"location\"): geom = obj.location.geometry.centroid", "None: return obj.location.address.city else: return \", \".join( [l.__str__() for l", "len(result) == len(queryset): result = self._search_area(queryset, search_term) return (result, use_distinct)", "features, where MIN < area.total < MAX \"\"\" filtered =", "geometry__intersects=obj.location.geometry)]) def get_search_results(self, request, queryset, search_term): \"\"\"Add NUTS3 (by name)", "extra_context = extra_context or {} response = super().changelist_view( request, extra_context=extra_context,", "according to name \"\"\" filtered = queryset.none() for cls in", "uuid import json class AirportAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon =", "from leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin from .models import Lau1 from", "or len(result) == len(queryset): result = self._search_lay1_nuts3_by_name( queryset, search_term) if", "= str(obj.pk) else: id = obj.pk feature = { \"type\":", "objects = queryset.filter( location__geometry__intersects=o.geometry) filtered |= objects return filtered def", "Nuts3.objects.filter( geometry__intersects=obj.location.geometry)]) def get_search_results(self, request, queryset, search_term): \"\"\"Add NUTS3 (by", "obj.name if type(obj.pk) == uuid.UUID: id = str(obj.pk) else: id", "return data # Register your models here. 
admin.site.register(Lau1, LAU1Admin) admin.site.register(Nuts3,", "or len(result) == len(queryset): result = self._search_area(queryset, search_term) return (result,", "default_zoom = 7 default_lon = 1730000 default_lat = 6430000 #readonly_fields", "= [] data = { \"type\": \"FeatureCollection\", \"features\": [] }", "\"name\",) class PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat", "= cls.objects.filter(name__startswith=search_term) for o in objs: objects = queryset.filter( location__geometry__intersects=o.geometry)", "filtered = queryset.none() if search_term.find(\"<>\") > -1: area_min, area_max =", "request, queryset, search_term): \"\"\"Add NUTS3 (by name) search and area", "location__geometry__intersects=o.geometry) filtered |= objects return filtered def _search_area(self, queryset, search_term):", "= 6430000 #readonly_fields = (\"code\", \"name\",) class RailwayStationAdmin(LeafletGeoAdmin): default_zoom =", "self.geojson_attributes else: attributes = [] data = { \"type\": \"FeatureCollection\",", "changelist_view(self, request, extra_context=None): \"\"\"Adjust change list view add GeoJSON encoded", "location__geometry__intersects=nuts3.geometry) else: results = queryset return results class ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes", "\"geom\"): geom = obj.geom elif hasattr(obj, \"address\"): geom = obj.address.coordinates", "in objs: objects = queryset.filter( location__geometry__intersects=o.geometry) filtered |= objects return", "nuts3 ) def queryset(self, request, queryset): val = self.value() if", "request, queryset): val = self.value() if val: nuts3 = Nuts3.objects.get(pk=val)", "in attributes: if hasattr(obj, attribute): value = getattr(obj, attribute.__str__()) if", "and \\ obj.location.address is not None: return obj.location.address.city else: return", "in obj.location_set.all()]) geom = multipoint.centroid elif hasattr(obj, \"location\"): geom =", "Register your models here. 
admin.site.register(Lau1, LAU1Admin) admin.site.register(Nuts3, NUTS3Admin) admin.site.register(Road, RoadAdmin)", "\\ json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context) return response def as_geojson(self, queryset): if self.geojson_attributes:", "queryset.filter( location__geometry__intersects=nuts3.geometry) else: results = queryset return results class ArealFieldAdmin(nested_admin.NestedModelAdmin):", "search_term): \"\"\"Search all features, where MIN < area.total < MAX", "areal__area__total__lte=area_max) return filtered def changelist_view(self, request, extra_context=None): \"\"\"Adjust change list", "size search (using `<>` operator) \"\"\" result, use_distinct = super(", "#readonly_fields = (\"code\", \"name\",) class PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon", "import LeafletGeoAdmin, LeafletGeoAdminMixin from .models import Lau1 from .models import", "for l in Nuts3.objects.filter( geometry__intersects=obj.location.geometry)]) def get_search_results(self, request, queryset, search_term):", "#readonly_fields = (\"code\", \"name\",) class RailwayStationAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon", "MAX \"\"\" filtered = queryset.none() if search_term.find(\"<>\") > -1: area_min,", "result = self._search_area(queryset, search_term) return (result, use_distinct) def _search_lay1_nuts3_by_name(self, queryset,", "len(result) == 0 or len(result) == len(queryset): result = self._search_area(queryset,", "django.urls import reverse from django.utils.translation import ugettext_lazy as _ import", "= obj.address.coordinates if geom: title = None if hasattr(obj, \"title\"):", "attributes = [] data = { \"type\": \"FeatureCollection\", \"features\": []", "Airport from .models import Road from .models import PublicTransportStop from", "\"\"\"Add NUTS3 (by name) search and area size search (using", "filtered = queryset.none() for cls in (Lau1, Nuts3): objs =", "\"\"\" extra_context = 
extra_context or {} response = super().changelist_view( request,", "obj in nuts3 ) def queryset(self, request, queryset): val =", "change list view add GeoJSON encoded data for the queryset", "class AirportAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat =", "= 'nuts3#' def lookups(self, request, model_admin): nuts3 = Nuts3.objects.all() return", "_search_area(self, queryset, search_term): \"\"\"Search all features, where MIN < area.total", "get_search_results(self, request, queryset, search_term): \"\"\"Add NUTS3 (by name) search and", "region according to name \"\"\" filtered = queryset.none() for cls", "json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context) return response def as_geojson(self, queryset): if self.geojson_attributes: attributes", "= obj.title elif hasattr(obj, \"name\"): title = obj.name if type(obj.pk)", "hasattr(obj, \"name\"): title = obj.name if type(obj.pk) == uuid.UUID: id", "operator) \"\"\" result, use_distinct = super( ArealFieldAdmin, self).get_search_results( request, queryset,", "filtered |= objects return filtered def _search_area(self, queryset, search_term): \"\"\"Search", "AirportAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat = 6430000", "x in search_term.split(\"<>\")] filtered = queryset.filter( areal__area__total__gte=area_min, areal__area__total__lte=area_max) return filtered", "value #print(feature) data[\"features\"].append(feature) return data # Register your models here.", "[l.__str__() for l in Nuts3.objects.filter( geometry__intersects=obj.location.geometry)]) def get_search_results(self, request, queryset,", "#readonly_fields = (\"code\", \"name\",) class RoadAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon", "queryset.none() if search_term.find(\"<>\") > -1: area_min, area_max = [float(x) for", "Nuts3 class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Lau1 class NUTS3Filter(admin.SimpleListFilter): 
\"\"\"Filter", "0 or len(result) == len(queryset): result = self._search_area(queryset, search_term) return", "elif hasattr(obj, \"address\"): geom = obj.address.coordinates if geom: title =", "hasattr(obj, \"title\"): title = obj.title elif hasattr(obj, \"name\"): title =", "= 7 default_lon = 1730000 default_lat = 6430000 #readonly_fields =", "results = queryset.filter( location__geometry__intersects=nuts3.geometry) else: results = queryset return results", "0 or len(result) == len(queryset): result = self._search_lay1_nuts3_by_name( queryset, search_term)", "GeoJSON encoded data for the queryset \"\"\" extra_context = extra_context", "queryset: geom = None if hasattr(obj, \"location_set\"): multipoint = geos.MultiPoint(", "{} response = super().changelist_view( request, extra_context=extra_context, ) if hasattr(response, \"context_data\"):", "-1: area_min, area_max = [float(x) for x in search_term.split(\"<>\")] filtered", "extra_context=extra_context, ) if hasattr(response, \"context_data\"): filtered_query_set = response.context_data[\"cl\"].queryset extra_context['objects_data'] =", "{ \"type\": \"FeatureCollection\", \"features\": [] } for obj in queryset:", "return ( (obj.id, obj.name) for obj in nuts3 ) def", "(\"code\", \"name\",) class RoadAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000", "\".join( [l.__str__() for l in Nuts3.objects.filter( geometry__intersects=obj.location.geometry)]) def get_search_results(self, request,", "geom: title = None if hasattr(obj, \"title\"): title = obj.title", "1730000 default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class NUTS3AdminInline(LeafletGeoAdminMixin,", "title = None if hasattr(obj, \"title\"): title = obj.title elif", "response = super().changelist_view( request, extra_context=extra_context, ) if hasattr(response, \"context_data\"): filtered_query_set", "request, extra_context=extra_context, ) if hasattr(response, \"context_data\"): filtered_query_set = 
response.context_data[\"cl\"].queryset extra_context['objects_data']", "= 6430000 #readonly_fields = (\"code\", \"name\",) class PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom =", "None if hasattr(obj, \"title\"): title = obj.title elif hasattr(obj, \"name\"):", "== uuid.UUID: feature[attribute] = str(value) else: feature[attribute] = value #print(feature)", "feature[attribute] = value #print(feature) data[\"features\"].append(feature) return data # Register your", "import Road from .models import PublicTransportStop from .models import RailwayStation", "\"address\"): geom = obj.address.coordinates if geom: title = None if", "uuid.UUID: id = str(obj.pk) else: id = obj.pk feature =", "6430000 #readonly_fields = (\"code\", \"name\",) class RoadAdmin(LeafletGeoAdmin): default_zoom = 7", "ArealFieldAdmin, self).get_search_results( request, queryset, search_term) if search_term: if len(result) ==", "geom = multipoint.centroid elif hasattr(obj, \"location\"): geom = obj.location.geometry.centroid elif", "if len(result) == 0 or len(result) == len(queryset): result =", "class PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat =", "id } for attribute in attributes: if hasattr(obj, attribute): value", "filtered def changelist_view(self, request, extra_context=None): \"\"\"Adjust change list view add", "[] def get_place(self, obj): if hasattr(obj.location, \"address\") and \\ obj.location.address", "title = obj.name if type(obj.pk) == uuid.UUID: id = str(obj.pk)", "if type(value) == uuid.UUID: feature[attribute] = str(value) else: feature[attribute] =", "= Nuts3.objects.get(pk=val) results = queryset.filter( location__geometry__intersects=nuts3.geometry) else: results = queryset", "reverse('admin:{}_{}_change'.format( obj._meta.app_label, obj._meta.model_name), args=(obj.pk,)), }, \"geometry\": json.loads(geom.json), \"id\": id }", "= getattr(obj, attribute.__str__()) if type(value) == uuid.UUID: feature[attribute] = 
str(value)", "super( ArealFieldAdmin, self).get_search_results( request, queryset, search_term) if search_term: if len(result)", "obj in queryset: geom = None if hasattr(obj, \"location_set\"): multipoint", "queryset): if self.geojson_attributes: attributes = self.geojson_attributes else: attributes = []", "= 6430000 #readonly_fields = (\"code\", \"name\",) class RoadAdmin(LeafletGeoAdmin): default_zoom =", "title = obj.title elif hasattr(obj, \"name\"): title = obj.name if", "len(result) == 0 or len(result) == len(queryset): result = self._search_lay1_nuts3_by_name(", "(okresy) region according to name \"\"\" filtered = queryset.none() for", "= self._search_area(queryset, search_term) return (result, use_distinct) def _search_lay1_nuts3_by_name(self, queryset, search_term):", "objs = cls.objects.filter(name__startswith=search_term) for o in objs: objects = queryset.filter(", "== 0 or len(result) == len(queryset): result = self._search_area(queryset, search_term)", "request, extra_context=None): \"\"\"Adjust change list view add GeoJSON encoded data", "else: attributes = [] data = { \"type\": \"FeatureCollection\", \"features\":", "search_term) if search_term: if len(result) == 0 or len(result) ==", "\"name\",) class LAU1Admin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat", "for attribute in attributes: if hasattr(obj, attribute): value = getattr(obj,", "data[\"features\"].append(feature) return data # Register your models here. 
admin.site.register(Lau1, LAU1Admin)", "for o in objs: objects = queryset.filter( location__geometry__intersects=o.geometry) filtered |=", "LeafletGeoAdminMixin from .models import Lau1 from .models import Nuts3 from", "elif hasattr(obj, \"name\"): title = obj.name if type(obj.pk) == uuid.UUID:", "= value #print(feature) data[\"features\"].append(feature) return data # Register your models", "admin.StackedInline): model = Lau1 class NUTS3Filter(admin.SimpleListFilter): \"\"\"Filter for admin interface", "name \"\"\" filtered = queryset.none() for cls in (Lau1, Nuts3):", "queryset, search_term): \"\"\"Search NUTS3 (kraje) and LAU1 (okresy) region according", "= response.context_data[\"cl\"].queryset extra_context['objects_data'] = \\ json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context) return response def", "as_geojson(self, queryset): if self.geojson_attributes: attributes = self.geojson_attributes else: attributes =", "feature = { \"type\": \"Feature\", \"properties\": { \"name\": title, \"object_url\":", "Road from .models import PublicTransportStop from .models import RailwayStation from", "= 6430000 #readonly_fields = (\"code\", \"name\",) class LAU1Admin(LeafletGeoAdmin): default_zoom =", "django.contrib import admin from django.contrib.gis import geos from leaflet.admin import", "< MAX \"\"\" filtered = queryset.none() if search_term.find(\"<>\") > -1:", "obj.location.geometry.centroid elif hasattr(obj, \"geom\"): geom = obj.geom elif hasattr(obj, \"address\"):", "\"Feature\", \"properties\": { \"name\": title, \"object_url\": reverse('admin:{}_{}_change'.format( obj._meta.app_label, obj._meta.model_name), args=(obj.pk,)),", "attributes: if hasattr(obj, attribute): value = getattr(obj, attribute.__str__()) if type(value)", "obj._meta.model_name), args=(obj.pk,)), }, \"geometry\": json.loads(geom.json), \"id\": id } for attribute", "class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Nuts3 class 
LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model", "if search_term.find(\"<>\") > -1: area_min, area_max = [float(x) for x", "is not None: return obj.location.address.city else: return \", \".join( [l.__str__()", "class RoadAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat =", "= (\"code\", \"name\",) class RailwayStationAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon =", "= _('NUTS3 regions') parameter_name = 'nuts3#' def lookups(self, request, model_admin):", "def _search_area(self, queryset, search_term): \"\"\"Search all features, where MIN <", "the queryset \"\"\" extra_context = extra_context or {} response =", "attribute.__str__()) if type(value) == uuid.UUID: feature[attribute] = str(value) else: feature[attribute]", "search_term) if len(result) == 0 or len(result) == len(queryset): result", "6430000 #readonly_fields = (\"code\", \"name\",) class NUTS3Admin(LeafletGeoAdmin): default_zoom = 7", "_('NUTS3 regions') parameter_name = 'nuts3#' def lookups(self, request, model_admin): nuts3", "parameter_name = 'nuts3#' def lookups(self, request, model_admin): nuts3 = Nuts3.objects.all()", "for x in search_term.split(\"<>\")] filtered = queryset.filter( areal__area__total__gte=area_min, areal__area__total__lte=area_max) return", "obj.title elif hasattr(obj, \"name\"): title = obj.name if type(obj.pk) ==", "model_admin): nuts3 = Nuts3.objects.all() return ( (obj.id, obj.name) for obj", "and LAU1 (okresy) region according to name \"\"\" filtered =", "import RailwayStation from django.urls import reverse from django.utils.translation import ugettext_lazy", "queryset, search_term) if len(result) == 0 or len(result) == len(queryset):", "queryset.filter( areal__area__total__gte=area_min, areal__area__total__lte=area_max) return filtered def changelist_view(self, request, extra_context=None): \"\"\"Adjust", "search_term.find(\"<>\") > -1: area_min, area_max = [float(x) for x in", "\"address\") and \\ obj.location.address is 
not None: return obj.location.address.city else:", "objs: objects = queryset.filter( location__geometry__intersects=o.geometry) filtered |= objects return filtered", "(\"code\", \"name\",) class PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000", "default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class PublicTransportStopAdmin(LeafletGeoAdmin): default_zoom", "django.utils.translation import ugettext_lazy as _ import nested_admin import uuid import", "import ugettext_lazy as _ import nested_admin import uuid import json", "model = Nuts3 class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Lau1 class", "_search_lay1_nuts3_by_name(self, queryset, search_term): \"\"\"Search NUTS3 (kraje) and LAU1 (okresy) region", "\"object_url\": reverse('admin:{}_{}_change'.format( obj._meta.app_label, obj._meta.model_name), args=(obj.pk,)), }, \"geometry\": json.loads(geom.json), \"id\": id", "if hasattr(obj, \"title\"): title = obj.title elif hasattr(obj, \"name\"): title", "import Airport from .models import Road from .models import PublicTransportStop", "= \\ json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context) return response def as_geojson(self, queryset): if", "NUTS3Filter(admin.SimpleListFilter): \"\"\"Filter for admin interface of NUTS3 regions (Kraje) \"\"\"", "= queryset.filter( location__geometry__intersects=o.geometry) filtered |= objects return filtered def _search_area(self,", "#readonly_fields = (\"code\", \"name\",) class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Nuts3", "title, \"object_url\": reverse('admin:{}_{}_change'.format( obj._meta.app_label, obj._meta.model_name), args=(obj.pk,)), }, \"geometry\": json.loads(geom.json), \"id\":", "obj.geom elif hasattr(obj, \"address\"): geom = obj.address.coordinates if geom: title", "> -1: area_min, area_max = [float(x) for x in search_term.split(\"<>\")]", "\"\"\" result, use_distinct = 
super( ArealFieldAdmin, self).get_search_results( request, queryset, search_term)", "\"location\"): geom = obj.location.geometry.centroid elif hasattr(obj, \"geom\"): geom = obj.geom", "for the queryset \"\"\" extra_context = extra_context or {} response", "{ \"type\": \"Feature\", \"properties\": { \"name\": title, \"object_url\": reverse('admin:{}_{}_change'.format( obj._meta.app_label,", "models here. admin.site.register(Lau1, LAU1Admin) admin.site.register(Nuts3, NUTS3Admin) admin.site.register(Road, RoadAdmin) admin.site.register(PublicTransportStop, PublicTransportStopAdmin)", "default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class RailwayStationAdmin(LeafletGeoAdmin): default_zoom", "obj): if hasattr(obj.location, \"address\") and \\ obj.location.address is not None:", "_ import nested_admin import uuid import json class AirportAdmin(LeafletGeoAdmin): default_zoom", "7 default_lon = 1730000 default_lat = 6430000 #readonly_fields = (\"code\",", "if hasattr(obj.location, \"address\") and \\ obj.location.address is not None: return", "def get_search_results(self, request, queryset, search_term): \"\"\"Add NUTS3 (by name) search", "val: nuts3 = Nuts3.objects.get(pk=val) results = queryset.filter( location__geometry__intersects=nuts3.geometry) else: results", "geom = obj.geom elif hasattr(obj, \"address\"): geom = obj.address.coordinates if", "or {} response = super().changelist_view( request, extra_context=extra_context, ) if hasattr(response,", "view add GeoJSON encoded data for the queryset \"\"\" extra_context", "import geos from leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin from .models import", "name) search and area size search (using `<>` operator) \"\"\"", "response.context_data[\"cl\"].queryset extra_context['objects_data'] = \\ json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context) return response def as_geojson(self,", "extra_context=None): \"\"\"Adjust change list view add GeoJSON 
encoded data for", "(\"code\", \"name\",) class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Nuts3 class LAU1AdminInline(LeafletGeoAdminMixin,", "admin from django.contrib.gis import geos from leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin", "str(value) else: feature[attribute] = value #print(feature) data[\"features\"].append(feature) return data #", "in (Lau1, Nuts3): objs = cls.objects.filter(name__startswith=search_term) for o in objs:", "= obj.geom elif hasattr(obj, \"address\"): geom = obj.address.coordinates if geom:", "PublicTransportStop from .models import RailwayStation from django.urls import reverse from", "type(obj.pk) == uuid.UUID: id = str(obj.pk) else: id = obj.pk", "attributes = self.geojson_attributes else: attributes = [] data = {", "== len(queryset): result = self._search_lay1_nuts3_by_name( queryset, search_term) if len(result) ==", "cls in (Lau1, Nuts3): objs = cls.objects.filter(name__startswith=search_term) for o in", "\"geometry\": json.loads(geom.json), \"id\": id } for attribute in attributes: if", "\"features\": [] } for obj in queryset: geom = None", "(obj.id, obj.name) for obj in nuts3 ) def queryset(self, request,", "NUTS3 regions (Kraje) \"\"\" title = _('NUTS3 regions') parameter_name =", "lookups(self, request, model_admin): nuts3 = Nuts3.objects.all() return ( (obj.id, obj.name)", "obj.name) for obj in nuts3 ) def queryset(self, request, queryset):", "def lookups(self, request, model_admin): nuts3 = Nuts3.objects.all() return ( (obj.id,", "LAU1Admin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat = 6430000", "else: feature[attribute] = value #print(feature) data[\"features\"].append(feature) return data # Register", "filtered def _search_area(self, queryset, search_term): \"\"\"Search all features, where MIN", "results = queryset return results class ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes = []", "(Lau1, Nuts3): objs = 
cls.objects.filter(name__startswith=search_term) for o in objs: objects", "here. admin.site.register(Lau1, LAU1Admin) admin.site.register(Nuts3, NUTS3Admin) admin.site.register(Road, RoadAdmin) admin.site.register(PublicTransportStop, PublicTransportStopAdmin) admin.site.register(RailwayStation,", "return (result, use_distinct) def _search_lay1_nuts3_by_name(self, queryset, search_term): \"\"\"Search NUTS3 (kraje)", "default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline):", "from django.contrib.gis import geos from leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin from", "nuts3 = Nuts3.objects.all() return ( (obj.id, obj.name) for obj in", "RailwayStation from django.urls import reverse from django.utils.translation import ugettext_lazy as", "interface of NUTS3 regions (Kraje) \"\"\" title = _('NUTS3 regions')", "result, use_distinct = super( ArealFieldAdmin, self).get_search_results( request, queryset, search_term) if", "from django.contrib import admin from django.contrib.gis import geos from leaflet.admin", "= Nuts3.objects.all() return ( (obj.id, obj.name) for obj in nuts3", "len(result) == len(queryset): result = self._search_lay1_nuts3_by_name( queryset, search_term) if len(result)", "obj.location.address.city else: return \", \".join( [l.__str__() for l in Nuts3.objects.filter(", "obj._meta.app_label, obj._meta.model_name), args=(obj.pk,)), }, \"geometry\": json.loads(geom.json), \"id\": id } for", "def get_place(self, obj): if hasattr(obj.location, \"address\") and \\ obj.location.address is", "queryset, search_term): \"\"\"Search all features, where MIN < area.total <", "\"name\",) class RoadAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat", "NUTS3Admin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat = 6430000", "value = getattr(obj, attribute.__str__()) if type(value) == uuid.UUID: feature[attribute] =", 
"self).get_search_results( request, queryset, search_term) if search_term: if len(result) == 0", "\"\"\"Filter for admin interface of NUTS3 regions (Kraje) \"\"\" title", "geom = obj.address.coordinates if geom: title = None if hasattr(obj,", "if hasattr(response, \"context_data\"): filtered_query_set = response.context_data[\"cl\"].queryset extra_context['objects_data'] = \\ json.dumps(self.as_geojson(filtered_query_set))", "type(value) == uuid.UUID: feature[attribute] = str(value) else: feature[attribute] = value", "1730000 default_lat = 6430000 #readonly_fields = (\"code\", \"name\",) class RoadAdmin(LeafletGeoAdmin):", "search and area size search (using `<>` operator) \"\"\" result,", "\"name\",) class NUTS3Admin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat", "obj.address.coordinates if geom: title = None if hasattr(obj, \"title\"): title", "= [] def get_place(self, obj): if hasattr(obj.location, \"address\") and \\", "\"\"\" filtered = queryset.none() for cls in (Lau1, Nuts3): objs", "hasattr(response, \"context_data\"): filtered_query_set = response.context_data[\"cl\"].queryset extra_context['objects_data'] = \\ json.dumps(self.as_geojson(filtered_query_set)) response.context_data.update(extra_context)", "LeafletGeoAdmin, LeafletGeoAdminMixin from .models import Lau1 from .models import Nuts3", "= Nuts3 class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Lau1 class NUTS3Filter(admin.SimpleListFilter):", "results class ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes = [] def get_place(self, obj): if", "queryset \"\"\" extra_context = extra_context or {} response = super().changelist_view(", ".models import Lau1 from .models import Nuts3 from .models import", "import PublicTransportStop from .models import RailwayStation from django.urls import reverse", "= 6430000 #readonly_fields = (\"code\", \"name\",) class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model", "= 
queryset.filter( location__geometry__intersects=nuts3.geometry) else: results = queryset return results class", "if geom: title = None if hasattr(obj, \"title\"): title =", "json.loads(geom.json), \"id\": id } for attribute in attributes: if hasattr(obj,", "Lau1 class NUTS3Filter(admin.SimpleListFilter): \"\"\"Filter for admin interface of NUTS3 regions", "data # Register your models here. admin.site.register(Lau1, LAU1Admin) admin.site.register(Nuts3, NUTS3Admin)", "= queryset.none() for cls in (Lau1, Nuts3): objs = cls.objects.filter(name__startswith=search_term)", "for loc in obj.location_set.all()]) geom = multipoint.centroid elif hasattr(obj, \"location\"):", "else: results = queryset return results class ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes =", "\"name\",) class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Nuts3 class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline):", "in Nuts3.objects.filter( geometry__intersects=obj.location.geometry)]) def get_search_results(self, request, queryset, search_term): \"\"\"Add NUTS3", "hasattr(obj, attribute): value = getattr(obj, attribute.__str__()) if type(value) == uuid.UUID:", "= obj.location.geometry.centroid elif hasattr(obj, \"geom\"): geom = obj.geom elif hasattr(obj,", "class ArealFieldAdmin(nested_admin.NestedModelAdmin): geojson_attributes = [] def get_place(self, obj): if hasattr(obj.location,", "for obj in queryset: geom = None if hasattr(obj, \"location_set\"):", "multipoint = geos.MultiPoint( [loc.address.coordinates for loc in obj.location_set.all()]) geom =", "queryset, search_term): \"\"\"Add NUTS3 (by name) search and area size", "RailwayStationAdmin(LeafletGeoAdmin): default_zoom = 7 default_lon = 1730000 default_lat = 6430000", "\"name\": title, \"object_url\": reverse('admin:{}_{}_change'.format( obj._meta.app_label, obj._meta.model_name), args=(obj.pk,)), }, \"geometry\": json.loads(geom.json),", "\"FeatureCollection\", \"features\": [] } 
for obj in queryset: geom =", "and area size search (using `<>` operator) \"\"\" result, use_distinct", "admin.StackedInline): model = Nuts3 class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline): model = Lau1", "import reverse from django.utils.translation import ugettext_lazy as _ import nested_admin" ]
[ "umbra.common.protobuf.umbra_grpc import MonitorBase from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot from umbra.monitor.tools", "from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot from umbra.monitor.tools import Tools logger", "import Instruction, Snapshot from umbra.monitor.tools import Tools logger = logging.getLogger(__name__)", "= Tools() async def Listen(self, stream): logging.debug(\"Instruction Received\") instruction: Instruction", "stream.recv_message() instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True) snapshot_dict = await self.tools.handle(instruction_dict) snapshot", "umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot from umbra.monitor.tools import Tools logger =", "from umbra.monitor.tools import Tools logger = logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING) class Monitor(MonitorBase):", "def Listen(self, stream): logging.debug(\"Instruction Received\") instruction: Instruction = await stream.recv_message()", "json_format from umbra.common.protobuf.umbra_grpc import MonitorBase from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot", "stream): logging.debug(\"Instruction Received\") instruction: Instruction = await stream.recv_message() instruction_dict =", "Monitor(MonitorBase): def __init__(self, info): self.tools = Tools() async def Listen(self,", "= logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING) class Monitor(MonitorBase): def __init__(self, info): self.tools =", "async def Listen(self, stream): logging.debug(\"Instruction Received\") instruction: Instruction = await", "google.protobuf import json_format from umbra.common.protobuf.umbra_grpc import MonitorBase from umbra.common.protobuf.umbra_pb2 import", "instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True) snapshot_dict = await self.tools.handle(instruction_dict) snapshot =", "self.tools = 
Tools() async def Listen(self, stream): logging.debug(\"Instruction Received\") instruction:", "logging.getLogger(\"hpack\").setLevel(logging.WARNING) class Monitor(MonitorBase): def __init__(self, info): self.tools = Tools() async", "import json import asyncio from google.protobuf import json_format from umbra.common.protobuf.umbra_grpc", "def __init__(self, info): self.tools = Tools() async def Listen(self, stream):", "Instruction = await stream.recv_message() instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True) snapshot_dict =", "await stream.recv_message() instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True) snapshot_dict = await self.tools.handle(instruction_dict)", "instruction: Instruction = await stream.recv_message() instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True) snapshot_dict", "from umbra.common.protobuf.umbra_grpc import MonitorBase from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot from", "import logging import json import asyncio from google.protobuf import json_format", "Snapshot from umbra.monitor.tools import Tools logger = logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING) class", "umbra.monitor.tools import Tools logger = logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING) class Monitor(MonitorBase): def", "Tools logger = logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING) class Monitor(MonitorBase): def __init__(self, info):", "Tools() async def Listen(self, stream): logging.debug(\"Instruction Received\") instruction: Instruction =", "class Monitor(MonitorBase): def __init__(self, info): self.tools = Tools() async def", "logger = logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING) class Monitor(MonitorBase): def __init__(self, info): self.tools", "= 
json_format.MessageToDict(instruction, preserving_proto_field_name=True) snapshot_dict = await self.tools.handle(instruction_dict) snapshot = json_format.ParseDict(snapshot_dict,", "MonitorBase from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot from umbra.monitor.tools import Tools", "__init__(self, info): self.tools = Tools() async def Listen(self, stream): logging.debug(\"Instruction", "Received\") instruction: Instruction = await stream.recv_message() instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True)", "json_format.MessageToDict(instruction, preserving_proto_field_name=True) snapshot_dict = await self.tools.handle(instruction_dict) snapshot = json_format.ParseDict(snapshot_dict, Snapshot())", "import Tools logger = logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING) class Monitor(MonitorBase): def __init__(self,", "preserving_proto_field_name=True) snapshot_dict = await self.tools.handle(instruction_dict) snapshot = json_format.ParseDict(snapshot_dict, Snapshot()) await", "import json_format from umbra.common.protobuf.umbra_grpc import MonitorBase from umbra.common.protobuf.umbra_pb2 import Instruction,", "Instruction, Snapshot from umbra.monitor.tools import Tools logger = logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING)", "from google.protobuf import json_format from umbra.common.protobuf.umbra_grpc import MonitorBase from umbra.common.protobuf.umbra_pb2", "Listen(self, stream): logging.debug(\"Instruction Received\") instruction: Instruction = await stream.recv_message() instruction_dict", "logging.debug(\"Instruction Received\") instruction: Instruction = await stream.recv_message() instruction_dict = json_format.MessageToDict(instruction,", "asyncio from google.protobuf import json_format from umbra.common.protobuf.umbra_grpc import MonitorBase from", "info): self.tools = Tools() async def Listen(self, stream): 
logging.debug(\"Instruction Received\")", "json import asyncio from google.protobuf import json_format from umbra.common.protobuf.umbra_grpc import", "import MonitorBase from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot from umbra.monitor.tools import", "logging import json import asyncio from google.protobuf import json_format from", "logging.getLogger(__name__) logging.getLogger(\"hpack\").setLevel(logging.WARNING) class Monitor(MonitorBase): def __init__(self, info): self.tools = Tools()", "= await stream.recv_message() instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True) snapshot_dict = await", "snapshot_dict = await self.tools.handle(instruction_dict) snapshot = json_format.ParseDict(snapshot_dict, Snapshot()) await stream.send_message(snapshot)", "import asyncio from google.protobuf import json_format from umbra.common.protobuf.umbra_grpc import MonitorBase" ]
[ "deal an extra 1d6 damage to the target whenever you", "print(\"No enemy in range\") return 0 ########################################################################## def pick_target(self): \"\"\"Who", "slot of 3rd or 4th level, you can maintain your", "_, source, target): \"\"\"More damage\"\"\" if source == self.caster: return", "= self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast Hunters Mark on {self._victim}\") ########################################################################## def", "########################################################################## def test_cast(self): \"\"\"test casting\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters Mark\"))", "points before this spell ends, you can use a bonus", "we do the spell\"\"\" if self.pick_target(): return 6 print(\"No enemy", "\"\"\"https://www.dndbeyond.com/spells/hunters-mark\"\"\" from unittest.mock import patch import dice from pycs.constant import", "you can use a bonus action on a subsequent turn", "moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) ########################################################################## def test_effect(self): \"\"\"Test the effect of", "############################################################################## ############################################################################## class HuntersMark(SpellAction): \"\"\"You choose a creature you can", "self.pick_target(): return 6 print(\"No enemy in range\") return 0 ##########################################################################", "do the spell\"\"\" if self.pick_target(): return 6 print(\"No enemy in", "new creature. At Higher Levels. 
When you cast this spell", "def test_removal(self): \"\"\"Test the effect gets removed\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS]", "[ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters Mark\")) self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) ########################################################################## def test_effect(self):", "########################################################################## def cast(self): \"\"\"Do the spell\"\"\" self._victim = self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner))", "with patch.object(Creature, \"rolld20\") as mock: mock.return_value = 18 with patch.object(dice,", "return (\"1d6\", 0, None) return (\"\", 0, None) ############################################################################## ##############################################################################", "self.caster.options_this_turn = [ActionCategory.BONUS] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunter<NAME>\")) self.caster.remove_concentration() self.assertFalse(self.enemy.has_effect(\"Hunter<NAME>\")) # EOF", "maintain your concentration on the spell for up to 8", "higher, you can maintain your concentration on the spell for", "up to 8 hours. 
When you use a spell slot", "########################################################################## def end_concentration(self): \"\"\"What happens when we stop concentrating\"\"\" if", "moveto=True) print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn), 2) ########################################################################## def test_removal(self): \"\"\"Test the effect", "\"type\": SpellType.BUFF, } ) super().__init__(name, **kwargs) self._victim = None ##########################################################################", "Wisdom (Perception) or Wisdom (Survival) check you make to find", "patch.object(dice, \"roll\") as mock_dice: mock_dice.return_value = 5 self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True) print(f\"{self.enemy.damage_this_turn=}\")", "== self.caster: return (\"1d6\", 0, None) return (\"\", 0, None)", "SpellTest ############################################################################## ############################################################################## ############################################################################## class HuntersMark(SpellAction): \"\"\"You choose a creature", "self.assertFalse(self.enemy.has_effect(\"Hunters Mark\")) self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) ########################################################################## def test_effect(self): \"\"\"Test", "self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn), 0) with patch.object(Creature, \"rolld20\") as mock:", "= 99 self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\"))", "super().setUp() self.caster.add_action(HuntersMark()) 
########################################################################## def test_cast(self): \"\"\"test casting\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS]", "hit points before this spell ends, you can use a", "maintain your concentration on the spell for up to 24", "enemy return None ########################################################################## def cast(self): \"\"\"Do the spell\"\"\" self._victim", "you can maintain your concentration on the spell for up", "SpellType.BUFF, } ) super().__init__(name, **kwargs) self._victim = None ########################################################################## def", "(\"\", 0, None) ############################################################################## ############################################################################## ############################################################################## class TestHuntersMark(SpellTest): \"\"\"Test Spell\"\"\"", "At Higher Levels. When you cast this spell using a", "of casting the spell\"\"\" print(self.caster.arena) self.caster.moves = 99 self.caster.options_this_turn =", "self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast Hunters Mark on {self._victim}\") ########################################################################## def end_concentration(self):", "import ActionCategory from pycs.constant import SpellType from pycs.creature import Creature", "a spell slot of 5th level or higher, you can", "= None ############################################################################## ############################################################################## ############################################################################## class HuntersMarkEffect(Effect): \"\"\"Hunters Mark Effect\"\"\"", "effect gets removed\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) 
self.assertTrue(self.enemy.has_effect(\"Hunter<NAME>\")) self.caster.remove_concentration()", "unittest.mock import patch import dice from pycs.constant import ActionCategory from", "import Creature from pycs.effect import Effect from pycs.gear import Shortbow", "class HuntersMark(SpellAction): \"\"\"You choose a creature you can see within", "mock_dice.return_value = 5 self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True) print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn), 2) ########################################################################## def", "with a weapon attack, and you have advantage on any", "HuntersMarkEffect(Effect): \"\"\"Hunters Mark Effect\"\"\" ########################################################################## def __init__(self, **kwargs): \"\"\"Initialise\"\"\" super().__init__(\"Hunters", "############################################################################## ############################################################################## ############################################################################## class HuntersMark(SpellAction): \"\"\"You choose a creature you", "casting the spell\"\"\" print(self.caster.arena) self.caster.moves = 99 self.caster.options_this_turn = [ActionCategory.BONUS,", "print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn), 2) ########################################################################## def test_removal(self): \"\"\"Test the effect gets", "5 self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True) print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn), 2) ########################################################################## def test_removal(self): \"\"\"Test", "spell slot of 3rd or 4th level, you can maintain", "to 24 hours.\"\"\" ########################################################################## def __init__(self, **kwargs): 
name = \"Hunters", "\"\"\"Hunters Mark Effect\"\"\" ########################################################################## def __init__(self, **kwargs): \"\"\"Initialise\"\"\" super().__init__(\"Hunters Mark\",", "kwargs.update( { \"category\": ActionCategory.BONUS, \"concentration\": SpellType.CONCENTRATION, \"level\": 1, \"reach\": 90,", "\"\"\"More damage\"\"\" if source == self.caster: return (\"1d6\", 0, None)", "choose a creature you can see within range and mystically", "\"level\": 1, \"reach\": 90, \"type\": SpellType.BUFF, } ) super().__init__(name, **kwargs)", "Until the spell ends, you deal an extra 1d6 damage", "on the spell for up to 24 hours.\"\"\" ########################################################################## def", "super().__init__(name, **kwargs) self._victim = None ########################################################################## def heuristic(self): \"\"\"Should we", "self.target = enemy return enemy return None ########################################################################## def cast(self):", "from unittest.mock import patch import dice from pycs.constant import ActionCategory", "return None ########################################################################## def cast(self): \"\"\"Do the spell\"\"\" self._victim =", "\"\"\"Test Spell\"\"\" ########################################################################## def setUp(self): \"\"\"test setup\"\"\" super().setUp() self.caster.add_action(HuntersMark()) ##########################################################################", "self._victim = self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast Hunters Mark on {self._victim}\") ##########################################################################", "\"\"\"Initialise\"\"\" super().__init__(\"Hunters Mark\", **kwargs) ########################################################################## def hook_target_additional_damage(self, _, source, target):", "When you 
use a spell slot of 5th level or", "Mark\") self._victim = None ############################################################################## ############################################################################## ############################################################################## class HuntersMarkEffect(Effect): \"\"\"Hunters", "the target drops to 0 hit points before this spell", "target whenever you hit it with a weapon attack, and", "24 hours.\"\"\" ########################################################################## def __init__(self, **kwargs): name = \"Hunters Mark\"", "dice from pycs.constant import ActionCategory from pycs.constant import SpellType from", "**kwargs): name = \"Hunters Mark\" kwargs.update( { \"category\": ActionCategory.BONUS, \"concentration\":", "from pycs.constant import SpellType from pycs.creature import Creature from pycs.effect", "When you cast this spell using a spell slot of", "to 8 hours. When you use a spell slot of", "Hunters Mark on {self._victim}\") ########################################################################## def end_concentration(self): \"\"\"What happens when", "can see within range and mystically mark it as your", "import dice from pycs.constant import ActionCategory from pycs.constant import SpellType", "turn of yours to mark a new creature. 
At Higher", "a weapon attack, and you have advantage on any Wisdom", "self.range()[0]: continue if enemy.has_effect(\"Hunters Mark\"): continue self.target = enemy return", "from pycs.gear import Shortbow from pycs.spell import SpellAction from pycs.spells.spelltest", "if self._victim: print(f\"Removing Hunters Mark from {self._victim}\") self._victim.remove_effect(\"Hunters Mark\") self._victim", "use a bonus action on a subsequent turn of yours", "= enemy return enemy return None ########################################################################## def cast(self): \"\"\"Do", "source == self.caster: return (\"1d6\", 0, None) return (\"\", 0,", "do the spell to\"\"\" for enemy in self.owner.pick_closest_enemy(): if self.owner.distance(enemy)", "yours to mark a new creature. At Higher Levels. When", "TestHuntersMark(SpellTest): \"\"\"Test Spell\"\"\" ########################################################################## def setUp(self): \"\"\"test setup\"\"\" super().setUp() self.caster.add_action(HuntersMark())", "if enemy.has_effect(\"Hunters Mark\"): continue self.target = enemy return enemy return", "concentration on the spell for up to 8 hours. When", "casting\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters Mark\")) self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\"))", "you deal an extra 1d6 damage to the target whenever", "the spell for up to 24 hours.\"\"\" ########################################################################## def __init__(self,", "bonus action on a subsequent turn of yours to mark", "and mystically mark it as your quarry. Until the spell", "Wisdom (Survival) check you make to find it. 
If the", "import Shortbow from pycs.spell import SpellAction from pycs.spells.spelltest import SpellTest", "before this spell ends, you can use a bonus action", "to the target whenever you hit it with a weapon", "print(f\"Cast Hunters Mark on {self._victim}\") ########################################################################## def end_concentration(self): \"\"\"What happens", "slot of 5th level or higher, you can maintain your", "= 18 with patch.object(dice, \"roll\") as mock_dice: mock_dice.return_value = 5", "level or higher, you can maintain your concentration on the", "your concentration on the spell for up to 24 hours.\"\"\"", "of 3rd or 4th level, you can maintain your concentration", "name = \"Hunters Mark\" kwargs.update( { \"category\": ActionCategory.BONUS, \"concentration\": SpellType.CONCENTRATION,", "\"concentration\": SpellType.CONCENTRATION, \"level\": 1, \"reach\": 90, \"type\": SpellType.BUFF, } )", "spell to\"\"\" for enemy in self.owner.pick_closest_enemy(): if self.owner.distance(enemy) > self.range()[0]:", "the spell\"\"\" print(self.caster.arena) self.caster.moves = 99 self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION]", "you make to find it. 
If the target drops to", "\"\"\"Who should we do the spell to\"\"\" for enemy in", "1, \"reach\": 90, \"type\": SpellType.BUFF, } ) super().__init__(name, **kwargs) self._victim", "pycs.creature import Creature from pycs.effect import Effect from pycs.gear import", "this spell using a spell slot of 3rd or 4th", "a creature you can see within range and mystically mark", "on a subsequent turn of yours to mark a new", "} ) super().__init__(name, **kwargs) self._victim = None ########################################################################## def heuristic(self):", "concentration on the spell for up to 24 hours.\"\"\" ##########################################################################", "If the target drops to 0 hit points before this", "from pycs.effect import Effect from pycs.gear import Shortbow from pycs.spell", "return 6 print(\"No enemy in range\") return 0 ########################################################################## def", "Higher Levels. When you cast this spell using a spell", "the spell for up to 8 hours. 
When you use", "setUp(self): \"\"\"test setup\"\"\" super().setUp() self.caster.add_action(HuntersMark()) ########################################################################## def test_cast(self): \"\"\"test casting\"\"\"", "def __init__(self, **kwargs): name = \"Hunters Mark\" kwargs.update( { \"category\":", "if source == self.caster: return (\"1d6\", 0, None) return (\"\",", "= 5 self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True) print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn), 2) ########################################################################## def test_removal(self):", "range\") return 0 ########################################################################## def pick_target(self): \"\"\"Who should we do", "for up to 24 hours.\"\"\" ########################################################################## def __init__(self, **kwargs): name", "<filename>pycs/spells/hunters_mark.py \"\"\"https://www.dndbeyond.com/spells/hunters-mark\"\"\" from unittest.mock import patch import dice from pycs.constant", "self._victim = None ############################################################################## ############################################################################## ############################################################################## class HuntersMarkEffect(Effect): \"\"\"Hunters Mark", "SpellAction from pycs.spells.spelltest import SpellTest ############################################################################## ############################################################################## ############################################################################## class HuntersMark(SpellAction):", "effect of casting the spell\"\"\" print(self.caster.arena) self.caster.moves = 99 self.caster.options_this_turn", "{ \"category\": ActionCategory.BONUS, \"concentration\": SpellType.CONCENTRATION, \"level\": 1, \"reach\": 90, \"type\":", "Mark\")) 
self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) ########################################################################## def test_effect(self): \"\"\"Test the", "weapon attack, and you have advantage on any Wisdom (Perception)", "SpellType.CONCENTRATION, \"level\": 1, \"reach\": 90, \"type\": SpellType.BUFF, } ) super().__init__(name,", "test_cast(self): \"\"\"test casting\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters Mark\")) self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False)", "on the spell for up to 8 hours. When you", "########################################################################## def test_removal(self): \"\"\"Test the effect gets removed\"\"\" self.caster.options_this_turn =", "self._victim: print(f\"Removing Hunters Mark from {self._victim}\") self._victim.remove_effect(\"Hunters Mark\") self._victim =", "Effect\"\"\" ########################################################################## def __init__(self, **kwargs): \"\"\"Initialise\"\"\" super().__init__(\"Hunters Mark\", **kwargs) ##########################################################################", "########################################################################## def __init__(self, **kwargs): \"\"\"Initialise\"\"\" super().__init__(\"Hunters Mark\", **kwargs) ########################################################################## def", "None ########################################################################## def heuristic(self): \"\"\"Should we do the spell\"\"\" if", "super().__init__(\"Hunters Mark\", **kwargs) ########################################################################## def hook_target_additional_damage(self, _, source, target): \"\"\"More", "0) with patch.object(Creature, \"rolld20\") as mock: mock.return_value = 18 with", "from {self._victim}\") self._victim.remove_effect(\"Hunters Mark\") 
self._victim = None ############################################################################## ############################################################################## ##############################################################################", "def test_effect(self): \"\"\"Test the effect of casting the spell\"\"\" print(self.caster.arena)", "############################################################################## class TestHuntersMark(SpellTest): \"\"\"Test Spell\"\"\" ########################################################################## def setUp(self): \"\"\"test setup\"\"\"", "your quarry. Until the spell ends, you deal an extra", "your concentration on the spell for up to 8 hours.", "test_effect(self): \"\"\"Test the effect of casting the spell\"\"\" print(self.caster.arena) self.caster.moves", "you hit it with a weapon attack, and you have", ") super().__init__(name, **kwargs) self._victim = None ########################################################################## def heuristic(self): \"\"\"Should", "source, target): \"\"\"More damage\"\"\" if source == self.caster: return (\"1d6\",", "up to 24 hours.\"\"\" ########################################################################## def __init__(self, **kwargs): name =", "from pycs.spell import SpellAction from pycs.spells.spelltest import SpellTest ############################################################################## ##############################################################################", "extra 1d6 damage to the target whenever you hit it", "= [ActionCategory.BONUS, ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn), 0)", "pycs.constant import SpellType from pycs.creature import Creature from pycs.effect import", "continue self.target = enemy return enemy return None 
########################################################################## def", "= None ########################################################################## def heuristic(self): \"\"\"Should we do the spell\"\"\"", "Effect from pycs.gear import Shortbow from pycs.spell import SpellAction from", "hit it with a weapon attack, and you have advantage", "enemy in range\") return 0 ########################################################################## def pick_target(self): \"\"\"Who should", "level, you can maintain your concentration on the spell for", "return 0 ########################################################################## def pick_target(self): \"\"\"Who should we do the", "self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn), 0) with patch.object(Creature, \"rolld20\") as mock: mock.return_value =", "can maintain your concentration on the spell for up to", "__init__(self, **kwargs): name = \"Hunters Mark\" kwargs.update( { \"category\": ActionCategory.BONUS,", "\"\"\"Should we do the spell\"\"\" if self.pick_target(): return 6 print(\"No", "end_concentration(self): \"\"\"What happens when we stop concentrating\"\"\" if self._victim: print(f\"Removing", "self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) ########################################################################## def test_effect(self): \"\"\"Test the effect", "########################################################################## def __init__(self, **kwargs): name = \"Hunters Mark\" kwargs.update( {", "on {self._victim}\") ########################################################################## def end_concentration(self): \"\"\"What happens when we stop", "def setUp(self): \"\"\"test setup\"\"\" super().setUp() self.caster.add_action(HuntersMark()) ########################################################################## def test_cast(self): \"\"\"test", "__init__(self, 
**kwargs): \"\"\"Initialise\"\"\" super().__init__(\"Hunters Mark\", **kwargs) ########################################################################## def hook_target_additional_damage(self, _,", "print(self.caster.arena) self.caster.moves = 99 self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True)", "def heuristic(self): \"\"\"Should we do the spell\"\"\" if self.pick_target(): return", "Mark Effect\"\"\" ########################################################################## def __init__(self, **kwargs): \"\"\"Initialise\"\"\" super().__init__(\"Hunters Mark\", **kwargs)", "= [ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters Mark\")) self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) ########################################################################## def", "self.caster.options_this_turn = [ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters Mark\")) self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) ##########################################################################", "Mark\", **kwargs) ########################################################################## def hook_target_additional_damage(self, _, source, target): \"\"\"More damage\"\"\"", "hours. 
When you use a spell slot of 5th level", "a subsequent turn of yours to mark a new creature.", "Spell\"\"\" ########################################################################## def setUp(self): \"\"\"test setup\"\"\" super().setUp() self.caster.add_action(HuntersMark()) ########################################################################## def", "with patch.object(dice, \"roll\") as mock_dice: mock_dice.return_value = 5 self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True)", "**kwargs): \"\"\"Initialise\"\"\" super().__init__(\"Hunters Mark\", **kwargs) ########################################################################## def hook_target_additional_damage(self, _, source,", "self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True) print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn), 2) ########################################################################## def test_removal(self): \"\"\"Test the", "self.assertEqual(len(self.enemy.damage_this_turn), 0) with patch.object(Creature, \"rolld20\") as mock: mock.return_value = 18", "None ############################################################################## ############################################################################## ############################################################################## class HuntersMarkEffect(Effect): \"\"\"Hunters Mark Effect\"\"\" ##########################################################################", "heuristic(self): \"\"\"Should we do the spell\"\"\" if self.pick_target(): return 6", "return (\"\", 0, None) ############################################################################## ############################################################################## ############################################################################## class TestHuntersMark(SpellTest): \"\"\"Test", "if self.owner.distance(enemy) > self.range()[0]: continue if enemy.has_effect(\"Hunters Mark\"): 
continue self.target", "as your quarry. Until the spell ends, you deal an", "attack, and you have advantage on any Wisdom (Perception) or", "########################################################################## def pick_target(self): \"\"\"Who should we do the spell to\"\"\"", "print(f\"Removing Hunters Mark from {self._victim}\") self._victim.remove_effect(\"Hunters Mark\") self._victim = None", "from pycs.spells.spelltest import SpellTest ############################################################################## ############################################################################## ############################################################################## class HuntersMark(SpellAction): \"\"\"You", "to mark a new creature. At Higher Levels. When you", "ActionCategory.BONUS, \"concentration\": SpellType.CONCENTRATION, \"level\": 1, \"reach\": 90, \"type\": SpellType.BUFF, }", "Mark on {self._victim}\") ########################################################################## def end_concentration(self): \"\"\"What happens when we", "gets removed\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunter<NAME>\")) self.caster.remove_concentration() self.assertFalse(self.enemy.has_effect(\"Hunter<NAME>\"))", "as mock: mock.return_value = 18 with patch.object(dice, \"roll\") as mock_dice:", "########################################################################## def setUp(self): \"\"\"test setup\"\"\" super().setUp() self.caster.add_action(HuntersMark()) ########################################################################## def test_cast(self):", "damage\"\"\" if source == self.caster: return (\"1d6\", 0, None) return", "make to find it. 
If the target drops to 0", "mock_dice: mock_dice.return_value = 5 self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True) print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn), 2) ##########################################################################", "self._victim.remove_effect(\"Hunters Mark\") self._victim = None ############################################################################## ############################################################################## ############################################################################## class HuntersMarkEffect(Effect):", "ActionCategory from pycs.constant import SpellType from pycs.creature import Creature from", "whenever you hit it with a weapon attack, and you", "spell slot of 5th level or higher, you can maintain", "\"category\": ActionCategory.BONUS, \"concentration\": SpellType.CONCENTRATION, \"level\": 1, \"reach\": 90, \"type\": SpellType.BUFF,", "when we stop concentrating\"\"\" if self._victim: print(f\"Removing Hunters Mark from", "**kwargs) ########################################################################## def hook_target_additional_damage(self, _, source, target): \"\"\"More damage\"\"\" if", "mystically mark it as your quarry. Until the spell ends,", "we stop concentrating\"\"\" if self._victim: print(f\"Removing Hunters Mark from {self._victim}\")", "Levels. When you cast this spell using a spell slot", "############################################################################## class HuntersMarkEffect(Effect): \"\"\"Hunters Mark Effect\"\"\" ########################################################################## def __init__(self, **kwargs):", "you have advantage on any Wisdom (Perception) or Wisdom (Survival)", "\"\"\"Do the spell\"\"\" self._victim = self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast Hunters Mark", "to find it. 
If the target drops to 0 hit", "def hook_target_additional_damage(self, _, source, target): \"\"\"More damage\"\"\" if source ==", "to\"\"\" for enemy in self.owner.pick_closest_enemy(): if self.owner.distance(enemy) > self.range()[0]: continue", "Hunters Mark from {self._victim}\") self._victim.remove_effect(\"Hunters Mark\") self._victim = None ##############################################################################", "HuntersMark(SpellAction): \"\"\"You choose a creature you can see within range", "pick_target(self): \"\"\"Who should we do the spell to\"\"\" for enemy", "on any Wisdom (Perception) or Wisdom (Survival) check you make", "import Effect from pycs.gear import Shortbow from pycs.spell import SpellAction", "a new creature. At Higher Levels. When you cast this", "as mock_dice: mock_dice.return_value = 5 self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True) print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn), 2)", "you use a spell slot of 5th level or higher,", "ends, you deal an extra 1d6 damage to the target", "import patch import dice from pycs.constant import ActionCategory from pycs.constant", "for enemy in self.owner.pick_closest_enemy(): if self.owner.distance(enemy) > self.range()[0]: continue if", "action on a subsequent turn of yours to mark a", "{self._victim}\") self._victim.remove_effect(\"Hunters Mark\") self._victim = None ############################################################################## ############################################################################## ############################################################################## class", "enemy in self.owner.pick_closest_enemy(): if self.owner.distance(enemy) > self.range()[0]: continue if enemy.has_effect(\"Hunters", "advantage on any Wisdom (Perception) or Wisdom (Survival) check you", "patch import dice from pycs.constant import ActionCategory from pycs.constant import", "self.caster: return (\"1d6\", 0, 
None) return (\"\", 0, None) ##############################################################################", "{self._victim}\") ########################################################################## def end_concentration(self): \"\"\"What happens when we stop concentrating\"\"\"", "None ########################################################################## def cast(self): \"\"\"Do the spell\"\"\" self._victim = self.target", "import SpellType from pycs.creature import Creature from pycs.effect import Effect", "0, None) return (\"\", 0, None) ############################################################################## ############################################################################## ############################################################################## class", "Mark\" kwargs.update( { \"category\": ActionCategory.BONUS, \"concentration\": SpellType.CONCENTRATION, \"level\": 1, \"reach\":", "ends, you can use a bonus action on a subsequent", "self._victim = None ########################################################################## def heuristic(self): \"\"\"Should we do the", "############################################################################## ############################################################################## class TestHuntersMark(SpellTest): \"\"\"Test Spell\"\"\" ########################################################################## def setUp(self): \"\"\"test", "def cast(self): \"\"\"Do the spell\"\"\" self._victim = self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast", "use a spell slot of 5th level or higher, you", "0, None) ############################################################################## ############################################################################## ############################################################################## class TestHuntersMark(SpellTest): \"\"\"Test Spell\"\"\" 
##########################################################################", "########################################################################## def heuristic(self): \"\"\"Should we do the spell\"\"\" if self.pick_target():", "the effect gets removed\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunter<NAME>\"))", "pycs.gear import Shortbow from pycs.spell import SpellAction from pycs.spells.spelltest import", "pycs.spells.spelltest import SpellTest ############################################################################## ############################################################################## ############################################################################## class HuntersMark(SpellAction): \"\"\"You choose", "using a spell slot of 3rd or 4th level, you", "find it. If the target drops to 0 hit points", "def end_concentration(self): \"\"\"What happens when we stop concentrating\"\"\" if self._victim:", "############################################################################## ############################################################################## ############################################################################## class HuntersMarkEffect(Effect): \"\"\"Hunters Mark Effect\"\"\" ########################################################################## def", "\"Hunters Mark\" kwargs.update( { \"category\": ActionCategory.BONUS, \"concentration\": SpellType.CONCENTRATION, \"level\": 1,", "mark it as your quarry. 
Until the spell ends, you", "drops to 0 hit points before this spell ends, you", "= \"Hunters Mark\" kwargs.update( { \"category\": ActionCategory.BONUS, \"concentration\": SpellType.CONCENTRATION, \"level\":", "1d6 damage to the target whenever you hit it with", "from pycs.constant import ActionCategory from pycs.constant import SpellType from pycs.creature", "from pycs.creature import Creature from pycs.effect import Effect from pycs.gear", "self.caster.add_action(HuntersMark()) ########################################################################## def test_cast(self): \"\"\"test casting\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters", "of yours to mark a new creature. At Higher Levels.", "spell using a spell slot of 3rd or 4th level,", "if self.pick_target(): return 6 print(\"No enemy in range\") return 0", "it as your quarry. Until the spell ends, you deal", "############################################################################## ############################################################################## ############################################################################## class TestHuntersMark(SpellTest): \"\"\"Test Spell\"\"\" ########################################################################## def setUp(self):", "spell ends, you deal an extra 1d6 damage to the", "damage to the target whenever you hit it with a", "and you have advantage on any Wisdom (Perception) or Wisdom", "an extra 1d6 damage to the target whenever you hit", "we do the spell to\"\"\" for enemy in self.owner.pick_closest_enemy(): if", "the spell\"\"\" if self.pick_target(): return 6 print(\"No enemy in range\")", "hours.\"\"\" ########################################################################## def __init__(self, **kwargs): name = \"Hunters Mark\" kwargs.update(", "spell\"\"\" self._victim = self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast Hunters Mark on 
{self._victim}\")", "self.assertEqual(len(self.enemy.damage_this_turn), 2) ########################################################################## def test_removal(self): \"\"\"Test the effect gets removed\"\"\"", "can use a bonus action on a subsequent turn of", "enemy return enemy return None ########################################################################## def cast(self): \"\"\"Do the", "[ActionCategory.BONUS, ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn), 0) with", "\"roll\") as mock_dice: mock_dice.return_value = 5 self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True) print(f\"{self.enemy.damage_this_turn=}\") self.assertEqual(len(self.enemy.damage_this_turn),", "the spell ends, you deal an extra 1d6 damage to", "4th level, you can maintain your concentration on the spell", "removed\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunter<NAME>\")) self.caster.remove_concentration() self.assertFalse(self.enemy.has_effect(\"Hunter<NAME>\")) #", "mock: mock.return_value = 18 with patch.object(dice, \"roll\") as mock_dice: mock_dice.return_value", "self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn),", "see within range and mystically mark it as your quarry.", "import SpellTest ############################################################################## ############################################################################## ############################################################################## class 
HuntersMark(SpellAction): \"\"\"You choose a", "you can see within range and mystically mark it as", "quarry. Until the spell ends, you deal an extra 1d6", "(Survival) check you make to find it. If the target", "5th level or higher, you can maintain your concentration on", "Mark\"): continue self.target = enemy return enemy return None ##########################################################################", "or 4th level, you can maintain your concentration on the", "6 print(\"No enemy in range\") return 0 ########################################################################## def pick_target(self):", "(\"1d6\", 0, None) return (\"\", 0, None) ############################################################################## ############################################################################## ##############################################################################", "self.caster.moves = 99 self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters", "have advantage on any Wisdom (Perception) or Wisdom (Survival) check", "18 with patch.object(dice, \"roll\") as mock_dice: mock_dice.return_value = 5 self.caster.do_stuff(categ=ActionCategory.ACTION,", "the target whenever you hit it with a weapon attack,", "8 hours. 
When you use a spell slot of 5th", "enemy.has_effect(\"Hunters Mark\"): continue self.target = enemy return enemy return None", "the spell\"\"\" self._victim = self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast Hunters Mark on", "moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn), 0) with patch.object(Creature, \"rolld20\") as", "########################################################################## def hook_target_additional_damage(self, _, source, target): \"\"\"More damage\"\"\" if source", "to 0 hit points before this spell ends, you can", "cast(self): \"\"\"Do the spell\"\"\" self._victim = self.target self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast Hunters", "SpellType from pycs.creature import Creature from pycs.effect import Effect from", "within range and mystically mark it as your quarry. Until", "for up to 8 hours. When you use a spell", "self._victim.add_effect(HuntersMarkEffect(caster=self.owner)) print(f\"Cast Hunters Mark on {self._victim}\") ########################################################################## def end_concentration(self): \"\"\"What", "None) return (\"\", 0, None) ############################################################################## ############################################################################## ############################################################################## class TestHuntersMark(SpellTest):", "target drops to 0 hit points before this spell ends,", "hook_target_additional_damage(self, _, source, target): \"\"\"More damage\"\"\" if source == self.caster:", "check you make to find it. 
If the target drops", "of 5th level or higher, you can maintain your concentration", "Shortbow from pycs.spell import SpellAction from pycs.spells.spelltest import SpellTest ##############################################################################", "the spell to\"\"\" for enemy in self.owner.pick_closest_enemy(): if self.owner.distance(enemy) >", "continue if enemy.has_effect(\"Hunters Mark\"): continue self.target = enemy return enemy", "it. If the target drops to 0 hit points before", "\"\"\"test setup\"\"\" super().setUp() self.caster.add_action(HuntersMark()) ########################################################################## def test_cast(self): \"\"\"test casting\"\"\" self.caster.options_this_turn", "pycs.effect import Effect from pycs.gear import Shortbow from pycs.spell import", "Mark\")) self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn), 0) with patch.object(Creature, \"rolld20\") as mock: mock.return_value", "it with a weapon attack, and you have advantage on", "this spell ends, you can use a bonus action on", "\"\"\"test casting\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters Mark\")) self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False) self.assertTrue(self.enemy.has_effect(\"Hunters", "return enemy return None ########################################################################## def cast(self): \"\"\"Do the spell\"\"\"", "None) ############################################################################## ############################################################################## ############################################################################## class TestHuntersMark(SpellTest): \"\"\"Test Spell\"\"\" ########################################################################## def", "class TestHuntersMark(SpellTest): \"\"\"Test Spell\"\"\" ########################################################################## def 
setUp(self): \"\"\"test setup\"\"\" super().setUp()", "target): \"\"\"More damage\"\"\" if source == self.caster: return (\"1d6\", 0,", "setup\"\"\" super().setUp() self.caster.add_action(HuntersMark()) ########################################################################## def test_cast(self): \"\"\"test casting\"\"\" self.caster.options_this_turn =", "happens when we stop concentrating\"\"\" if self._victim: print(f\"Removing Hunters Mark", "pycs.spell import SpellAction from pycs.spells.spelltest import SpellTest ############################################################################## ############################################################################## ##############################################################################", "############################################################################## ############################################################################## class HuntersMarkEffect(Effect): \"\"\"Hunters Mark Effect\"\"\" ########################################################################## def __init__(self,", "self.owner.distance(enemy) > self.range()[0]: continue if enemy.has_effect(\"Hunters Mark\"): continue self.target =", "**kwargs) self._victim = None ########################################################################## def heuristic(self): \"\"\"Should we do", "\"\"\"What happens when we stop concentrating\"\"\" if self._victim: print(f\"Removing Hunters", "def test_cast(self): \"\"\"test casting\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.assertFalse(self.enemy.has_effect(\"Hunters Mark\")) self.caster.do_stuff(categ=ActionCategory.BONUS,", "concentrating\"\"\" if self._victim: print(f\"Removing Hunters Mark from {self._victim}\") self._victim.remove_effect(\"Hunters Mark\")", "in self.owner.pick_closest_enemy(): if self.owner.distance(enemy) > self.range()[0]: continue if enemy.has_effect(\"Hunters Mark\"):", "test_removal(self): \"\"\"Test the effect gets removed\"\"\" 
self.caster.options_this_turn = [ActionCategory.BONUS] self.caster.do_stuff(categ=ActionCategory.BONUS,", "in range\") return 0 ########################################################################## def pick_target(self): \"\"\"Who should we", "ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn), 0) with patch.object(Creature,", "def pick_target(self): \"\"\"Who should we do the spell to\"\"\" for", "should we do the spell to\"\"\" for enemy in self.owner.pick_closest_enemy():", "mock.return_value = 18 with patch.object(dice, \"roll\") as mock_dice: mock_dice.return_value =", "\"\"\"Test the effect of casting the spell\"\"\" print(self.caster.arena) self.caster.moves =", "0 hit points before this spell ends, you can use", "########################################################################## def test_effect(self): \"\"\"Test the effect of casting the spell\"\"\"", "Creature from pycs.effect import Effect from pycs.gear import Shortbow from", "you cast this spell using a spell slot of 3rd", "cast this spell using a spell slot of 3rd or", "range and mystically mark it as your quarry. Until the", "self.owner.pick_closest_enemy(): if self.owner.distance(enemy) > self.range()[0]: continue if enemy.has_effect(\"Hunters Mark\"): continue", "creature you can see within range and mystically mark it", "\"\"\"You choose a creature you can see within range and", "import SpellAction from pycs.spells.spelltest import SpellTest ############################################################################## ############################################################################## ############################################################################## class", "creature. At Higher Levels. When you cast this spell using", "mark a new creature. At Higher Levels. 
When you cast", "a bonus action on a subsequent turn of yours to", "99 self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) self.caster.add_gear(Shortbow())", "\"rolld20\") as mock: mock.return_value = 18 with patch.object(dice, \"roll\") as", "spell for up to 8 hours. When you use a", "(Perception) or Wisdom (Survival) check you make to find it.", "or Wisdom (Survival) check you make to find it. If", "a spell slot of 3rd or 4th level, you can", "or higher, you can maintain your concentration on the spell", "> self.range()[0]: continue if enemy.has_effect(\"Hunters Mark\"): continue self.target = enemy", "spell for up to 24 hours.\"\"\" ########################################################################## def __init__(self, **kwargs):", "spell\"\"\" if self.pick_target(): return 6 print(\"No enemy in range\") return", "the effect of casting the spell\"\"\" print(self.caster.arena) self.caster.moves = 99", "\"reach\": 90, \"type\": SpellType.BUFF, } ) super().__init__(name, **kwargs) self._victim =", "Mark from {self._victim}\") self._victim.remove_effect(\"Hunters Mark\") self._victim = None ############################################################################## ##############################################################################", "Mark\")) ########################################################################## def test_effect(self): \"\"\"Test the effect of casting the", "any Wisdom (Perception) or Wisdom (Survival) check you make to", "3rd or 4th level, you can maintain your concentration on", "spell ends, you can use a bonus action on a", "def __init__(self, **kwargs): \"\"\"Initialise\"\"\" super().__init__(\"Hunters Mark\", **kwargs) ########################################################################## def hook_target_additional_damage(self,", "spell\"\"\" print(self.caster.arena) 
self.caster.moves = 99 self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION] self.caster.do_stuff(categ=ActionCategory.BONUS,", "patch.object(Creature, \"rolld20\") as mock: mock.return_value = 18 with patch.object(dice, \"roll\")", "############################################################################## class HuntersMark(SpellAction): \"\"\"You choose a creature you can see", "0 ########################################################################## def pick_target(self): \"\"\"Who should we do the spell", "self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True) self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) self.caster.add_gear(Shortbow()) self.assertEqual(len(self.enemy.damage_this_turn), 0) with patch.object(Creature, \"rolld20\")", "90, \"type\": SpellType.BUFF, } ) super().__init__(name, **kwargs) self._victim = None", "self.assertTrue(self.enemy.has_effect(\"Hunters Mark\")) ########################################################################## def test_effect(self): \"\"\"Test the effect of casting", "class HuntersMarkEffect(Effect): \"\"\"Hunters Mark Effect\"\"\" ########################################################################## def __init__(self, **kwargs): \"\"\"Initialise\"\"\"", "pycs.constant import ActionCategory from pycs.constant import SpellType from pycs.creature import", "subsequent turn of yours to mark a new creature. At", "\"\"\"Test the effect gets removed\"\"\" self.caster.options_this_turn = [ActionCategory.BONUS] self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False)", "stop concentrating\"\"\" if self._victim: print(f\"Removing Hunters Mark from {self._victim}\") self._victim.remove_effect(\"Hunters", "2) ########################################################################## def test_removal(self): \"\"\"Test the effect gets removed\"\"\" self.caster.options_this_turn" ]
[ "np.arccos(0.5*np.trace(rot)-0.5) def rot_to_heading(rot): # This function calculates the heading angle", "as np def rot_to_angle(rot): return np.arccos(0.5*np.trace(rot)-0.5) def rot_to_heading(rot): # This", "w.r.t. the y-axis new_rot = rot[0:3:2, 0:3:2] # remove the", "calculates the heading angle of the rot matrix w.r.t. the", "rot_to_angle(rot): return np.arccos(0.5*np.trace(rot)-0.5) def rot_to_heading(rot): # This function calculates the", "the y-axis new_rot = rot[0:3:2, 0:3:2] # remove the mid", "This function calculates the heading angle of the rot matrix", "# remove the mid row and column corresponding to the", "of the rot matrix w.r.t. the y-axis new_rot = rot[0:3:2,", "rot_to_heading(rot): # This function calculates the heading angle of the", "angle of the rot matrix w.r.t. the y-axis new_rot =", "function calculates the heading angle of the rot matrix w.r.t.", "row and column corresponding to the y-axis new_rot = new_rot/np.linalg.det(new_rot)", "matrix w.r.t. the y-axis new_rot = rot[0:3:2, 0:3:2] # remove", "rot matrix w.r.t. the y-axis new_rot = rot[0:3:2, 0:3:2] #", "# This function calculates the heading angle of the rot", "the heading angle of the rot matrix w.r.t. the y-axis", "the rot matrix w.r.t. the y-axis new_rot = rot[0:3:2, 0:3:2]", "0:3:2] # remove the mid row and column corresponding to", "and column corresponding to the y-axis new_rot = new_rot/np.linalg.det(new_rot) return", "import numpy as np def rot_to_angle(rot): return np.arccos(0.5*np.trace(rot)-0.5) def rot_to_heading(rot):", "y-axis new_rot = rot[0:3:2, 0:3:2] # remove the mid row", "np def rot_to_angle(rot): return np.arccos(0.5*np.trace(rot)-0.5) def rot_to_heading(rot): # This function", "heading angle of the rot matrix w.r.t. 
the y-axis new_rot", "def rot_to_heading(rot): # This function calculates the heading angle of", "column corresponding to the y-axis new_rot = new_rot/np.linalg.det(new_rot) return np.arctan2(new_rot[1,", "numpy as np def rot_to_angle(rot): return np.arccos(0.5*np.trace(rot)-0.5) def rot_to_heading(rot): #", "rot[0:3:2, 0:3:2] # remove the mid row and column corresponding", "new_rot = rot[0:3:2, 0:3:2] # remove the mid row and", "remove the mid row and column corresponding to the y-axis", "the mid row and column corresponding to the y-axis new_rot", "the y-axis new_rot = new_rot/np.linalg.det(new_rot) return np.arctan2(new_rot[1, 0], new_rot[0, 0])", "def rot_to_angle(rot): return np.arccos(0.5*np.trace(rot)-0.5) def rot_to_heading(rot): # This function calculates", "corresponding to the y-axis new_rot = new_rot/np.linalg.det(new_rot) return np.arctan2(new_rot[1, 0],", "mid row and column corresponding to the y-axis new_rot =", "return np.arccos(0.5*np.trace(rot)-0.5) def rot_to_heading(rot): # This function calculates the heading", "= rot[0:3:2, 0:3:2] # remove the mid row and column", "to the y-axis new_rot = new_rot/np.linalg.det(new_rot) return np.arctan2(new_rot[1, 0], new_rot[0," ]
[ "GripperTester from .panda_gripper import PandaGripper from .rethink_gripper import RethinkGripper from", "\"JacoThreeFingerGripper\": JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\": JacoThreeFingerDexterousGripper, \"WipingGripper\": WipingGripper, \"Robotiq85Gripper\": Robotiq85Gripper, \"Robotiq140Gripper\": Robotiq140Gripper,", "Robotiq85Gripper, \"Robotiq140Gripper\": Robotiq140Gripper, \"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper, \"RobotiqThreeFingerDexterousGripper\": RobotiqThreeFingerDexterousGripper, None: NullGripper, }", "Robotiq140Gripper, \"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper, \"RobotiqThreeFingerDexterousGripper\": RobotiqThreeFingerDexterousGripper, None: NullGripper, } ALL_GRIPPERS =", ".robotiq_85_gripper import Robotiq85Gripper from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper from .panda_gripper", "\"JacoThreeFingerDexterousGripper\": JacoThreeFingerDexterousGripper, \"WipingGripper\": WipingGripper, \"Robotiq85Gripper\": Robotiq85Gripper, \"Robotiq140Gripper\": Robotiq140Gripper, \"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper,", "= { \"RethinkGripper\": RethinkGripper, \"PandaGripper\": PandaGripper, \"JacoThreeFingerGripper\": JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\": JacoThreeFingerDexterousGripper,", "\"Robotiq85Gripper\": Robotiq85Gripper, \"Robotiq140Gripper\": Robotiq140Gripper, \"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper, \"RobotiqThreeFingerDexterousGripper\": RobotiqThreeFingerDexterousGripper, None: NullGripper,", "\"RethinkGripper\": RethinkGripper, \"PandaGripper\": PandaGripper, \"JacoThreeFingerGripper\": JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\": JacoThreeFingerDexterousGripper, \"WipingGripper\": WipingGripper,", "RethinkGripper, \"PandaGripper\": PandaGripper, \"JacoThreeFingerGripper\": JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\": 
JacoThreeFingerDexterousGripper, \"WipingGripper\": WipingGripper, \"Robotiq85Gripper\":", "from .gripper_factory import gripper_factory from .gripper_tester import GripperTester from .panda_gripper", "import PandaGripper from .rethink_gripper import RethinkGripper from .robotiq_85_gripper import Robotiq85Gripper", "RethinkGripper from .robotiq_85_gripper import Robotiq85Gripper from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper", "import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper from .robotiq_140_gripper import Robotiq140Gripper from .wiping_gripper import", "JacoThreeFingerGripper, JacoThreeFingerDexterousGripper from .robotiq_140_gripper import Robotiq140Gripper from .wiping_gripper import WipingGripper", "from .panda_gripper import PandaGripper from .jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper from", "GRIPPER_MAPPING = { \"RethinkGripper\": RethinkGripper, \"PandaGripper\": PandaGripper, \"JacoThreeFingerGripper\": JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\":", "import RethinkGripper from .robotiq_85_gripper import Robotiq85Gripper from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper,", "from .gripper_tester import GripperTester from .panda_gripper import PandaGripper from .rethink_gripper", ".robotiq_140_gripper import Robotiq140Gripper from .wiping_gripper import WipingGripper from .null_gripper import", ".jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper from .robotiq_140_gripper import Robotiq140Gripper from .wiping_gripper", "\"WipingGripper\": WipingGripper, \"Robotiq85Gripper\": Robotiq85Gripper, \"Robotiq140Gripper\": Robotiq140Gripper, \"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper, \"RobotiqThreeFingerDexterousGripper\": RobotiqThreeFingerDexterousGripper,", "WipingGripper, \"Robotiq85Gripper\": Robotiq85Gripper, \"Robotiq140Gripper\": Robotiq140Gripper, 
\"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper, \"RobotiqThreeFingerDexterousGripper\": RobotiqThreeFingerDexterousGripper, None:", "PandaGripper from .rethink_gripper import RethinkGripper from .robotiq_85_gripper import Robotiq85Gripper from", "PandaGripper from .jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper from .robotiq_140_gripper import Robotiq140Gripper", "from .panda_gripper import PandaGripper from .rethink_gripper import RethinkGripper from .robotiq_85_gripper", "GripperModel from .gripper_factory import gripper_factory from .gripper_tester import GripperTester from", "from .robotiq_140_gripper import Robotiq140Gripper from .wiping_gripper import WipingGripper from .null_gripper", "from .wiping_gripper import WipingGripper from .null_gripper import NullGripper GRIPPER_MAPPING =", "from .rethink_gripper import RethinkGripper from .robotiq_85_gripper import Robotiq85Gripper from .robotiq_three_finger_gripper", "\"Robotiq140Gripper\": Robotiq140Gripper, \"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper, \"RobotiqThreeFingerDexterousGripper\": RobotiqThreeFingerDexterousGripper, None: NullGripper, } ALL_GRIPPERS", "\"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper, \"RobotiqThreeFingerDexterousGripper\": RobotiqThreeFingerDexterousGripper, None: NullGripper, } ALL_GRIPPERS = GRIPPER_MAPPING.keys()", "from .gripper_model import GripperModel from .gripper_factory import gripper_factory from .gripper_tester", "JacoThreeFingerDexterousGripper, \"WipingGripper\": WipingGripper, \"Robotiq85Gripper\": Robotiq85Gripper, \"Robotiq140Gripper\": Robotiq140Gripper, \"RobotiqThreeFingerGripper\": RobotiqThreeFingerGripper, \"RobotiqThreeFingerDexterousGripper\":", ".null_gripper import NullGripper GRIPPER_MAPPING = { \"RethinkGripper\": RethinkGripper, \"PandaGripper\": PandaGripper,", "Robotiq140Gripper from .wiping_gripper import WipingGripper from .null_gripper import NullGripper GRIPPER_MAPPING", 
"import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper from .panda_gripper import PandaGripper from .jaco_three_finger_gripper import", ".panda_gripper import PandaGripper from .jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper from .robotiq_140_gripper", "NullGripper GRIPPER_MAPPING = { \"RethinkGripper\": RethinkGripper, \"PandaGripper\": PandaGripper, \"JacoThreeFingerGripper\": JacoThreeFingerGripper,", "import PandaGripper from .jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper from .robotiq_140_gripper import", "import Robotiq140Gripper from .wiping_gripper import WipingGripper from .null_gripper import NullGripper", "from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper from .panda_gripper import PandaGripper from", "import NullGripper GRIPPER_MAPPING = { \"RethinkGripper\": RethinkGripper, \"PandaGripper\": PandaGripper, \"JacoThreeFingerGripper\":", "import WipingGripper from .null_gripper import NullGripper GRIPPER_MAPPING = { \"RethinkGripper\":", "Robotiq85Gripper from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper from .panda_gripper import PandaGripper", "import GripperTester from .panda_gripper import PandaGripper from .rethink_gripper import RethinkGripper", "<reponame>kyungjaelee/robosuite<gh_stars>100-1000 from .gripper_model import GripperModel from .gripper_factory import gripper_factory from", "import gripper_factory from .gripper_tester import GripperTester from .panda_gripper import PandaGripper", ".rethink_gripper import RethinkGripper from .robotiq_85_gripper import Robotiq85Gripper from .robotiq_three_finger_gripper import", "RobotiqThreeFingerDexterousGripper from .panda_gripper import PandaGripper from .jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper", "JacoThreeFingerDexterousGripper from 
.robotiq_140_gripper import Robotiq140Gripper from .wiping_gripper import WipingGripper from", "{ \"RethinkGripper\": RethinkGripper, \"PandaGripper\": PandaGripper, \"JacoThreeFingerGripper\": JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\": JacoThreeFingerDexterousGripper, \"WipingGripper\":", ".wiping_gripper import WipingGripper from .null_gripper import NullGripper GRIPPER_MAPPING = {", ".robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper from .panda_gripper import PandaGripper from .jaco_three_finger_gripper", "\"PandaGripper\": PandaGripper, \"JacoThreeFingerGripper\": JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\": JacoThreeFingerDexterousGripper, \"WipingGripper\": WipingGripper, \"Robotiq85Gripper\": Robotiq85Gripper,", "from .null_gripper import NullGripper GRIPPER_MAPPING = { \"RethinkGripper\": RethinkGripper, \"PandaGripper\":", "from .jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper from .robotiq_140_gripper import Robotiq140Gripper from", "import GripperModel from .gripper_factory import gripper_factory from .gripper_tester import GripperTester", ".gripper_tester import GripperTester from .panda_gripper import PandaGripper from .rethink_gripper import", ".gripper_model import GripperModel from .gripper_factory import gripper_factory from .gripper_tester import", "import Robotiq85Gripper from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper from .panda_gripper import", "RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper from .panda_gripper import PandaGripper from .jaco_three_finger_gripper import JacoThreeFingerGripper,", "PandaGripper, \"JacoThreeFingerGripper\": JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\": JacoThreeFingerDexterousGripper, \"WipingGripper\": WipingGripper, \"Robotiq85Gripper\": Robotiq85Gripper, \"Robotiq140Gripper\":", ".gripper_factory import 
gripper_factory from .gripper_tester import GripperTester from .panda_gripper import", "JacoThreeFingerGripper, \"JacoThreeFingerDexterousGripper\": JacoThreeFingerDexterousGripper, \"WipingGripper\": WipingGripper, \"Robotiq85Gripper\": Robotiq85Gripper, \"Robotiq140Gripper\": Robotiq140Gripper, \"RobotiqThreeFingerGripper\":", "WipingGripper from .null_gripper import NullGripper GRIPPER_MAPPING = { \"RethinkGripper\": RethinkGripper,", "from .robotiq_85_gripper import Robotiq85Gripper from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper from", ".panda_gripper import PandaGripper from .rethink_gripper import RethinkGripper from .robotiq_85_gripper import", "gripper_factory from .gripper_tester import GripperTester from .panda_gripper import PandaGripper from" ]
[ "= fig.add_subplot(1, 1, 1) # remove channnel dimension ax.imshow(img.squeeze()) else:", "is not None: ax.set_xlim(left=0, right=W) if H is not None:", "keypoint_names, edges, color_map, normalize=False): import random idx = random.randint(0, len(dataset)", "((s, t), c) in zip(indices, color): # Select point which", "example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint)", "return input_point, ax, H, W def vis_point(point, img=None, color=None, ax=None):", "255. if color is not None else None for i", "plt.savefig(\"output.png\") plt.show() def visualize_rgb(dataset, keypoint_names, edges, color_map, idx=None): import random", "= example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu,", "= fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3 = fig.add_subplot(223, projection=\"3d\") ax4", "rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera, z_size) print(example[\"param\"]) vis_point(rgb_vu, img=rgb, color=color, ax=ax1)", "visualize_depth(dataset, keypoint_names, edges, color_map, normalize=False): idx = random.randint(0, len(dataset) -", "in an image \"\"\" point, ax, H, W = preprocess(point,", "(0, 255, 255), \"MAGENTA\": (255, 0, 255), } def vis_image(img,", "rgb_vu = rgb_camera.zyx2vu(rgb_joint) rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera, z_size) print(example[\"param\"]) vis_point(rgb_vu,", "= example[\"depth\"].astype(np.float32) depth_joint = example[\"depth_joint\"] depth_camera = example[\"depth_camera\"] depth_vu, depth_z", "def vis_pose(point, indices, img=None, point_color=None, edge_color=None, ax=None): ax = vis_point(point,", "# Note that [::-1] does resort coordinate order: yx ->", "else: color = [None] * len(indices) for i in range(n_inst):", "the format of one is (y, x) or (z,y,x). 
pts", "normalize: depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera,", "vis_image(img, ax=None): \"\"\" extend chainercv.visualizations.vis_image \"\"\" C, H, W =", "ax1 = fig.add_subplot(211) ax3 = fig.add_subplot(212, projection=\"3d\") color = [color_map[k]", "input_point.shape[-1] == 3: ax = fig.add_subplot(1, 1, 1, projection=\"3d\") else:", "img) n_inst = len(point) c = np.asarray(color) / 255. if", "for i in range(n_inst): # note that the shape of", "of one is (y, x), (z,y,x). # (K, N) ->", "0), \"CYAN\": (0, 255, 255), \"MAGENTA\": (255, 0, 255), }", "10)) ax2 = fig.add_subplot(211) ax4 = fig.add_subplot(212, projection=\"3d\") color =", "return ax def vis_edge(point, indices, img=None, color=None, ax=None): \"\"\" Visualize", "plt.figure(figsize=(5, 10)) ax1 = fig.add_subplot(211) ax3 = fig.add_subplot(212, projection=\"3d\") color", "[None] * len(indices) for i in range(n_inst): # note that", "= [color_map[s, t] for s, t in edges] rgb =", "\"MAGENTA\": (255, 0, 255), } def vis_image(img, ax=None): \"\"\" extend", "order : yx -> xy or zyx -> xyz pts", "is (K,N) and the format of one is (y, x),", "= normalize_joint_zyx(rgb_joint, rgb_camera, z_size) print(example[\"param\"]) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu,", "ax2 = fig.add_subplot(222) ax3 = fig.add_subplot(223, projection=\"3d\") ax4 = fig.add_subplot(224,", "channnel dimension ax.imshow(img.squeeze()) else: ax = chainercv.visualizations.vis_image(img, ax) return ax", "coordinate order: yx -> xy or zyx -> xyz edge", "= np.asarray(color) / 255. 
else: color = [None] * len(indices)", "ax, img) n_inst = len(point) c = np.asarray(color) / 255.", "t), c) in zip(indices, color): # Select point which consists", "top=0) return ax def vis_pose(point, indices, img=None, point_color=None, edge_color=None, ax=None):", "pyplot as plt from mpl_toolkits.mplot3d import Axes3D # NOQA from", "ax=ax4) for ax in [ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90)", "is None: fig = plt.figure() if input_point.shape[-1] == 3: ax", "ax def vis_edge(point, indices, img=None, color=None, ax=None): \"\"\" Visualize edges", "0), \"BLUE\": (0, 0, 255), \"YELLOW\": (255, 255, 0), \"CYAN\":", "if input_point.ndim == 2: input_point = np.expand_dims(point, axis=0) H, W", "H, W = preprocess(point, ax, img) n_inst = len(point) if", "shape of `point[i]` is (K,N) and the format of one", "(255, 0, 0), \"GREEN\": (0, 255, 0), \"BLUE\": (0, 0,", "or point (start, target). # Note that [::-1] does resort", "color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) vis_point(depth_vu, img=depth, color=color, ax=ax2)", "color=None, ax=None): \"\"\" Visualize points in an image, customized to", "matplotlib import pyplot as plt from mpl_toolkits.mplot3d import Axes3D #", "t] for s, t in edges] depth = example[\"depth\"].astype(np.float32) depth_joint", "-90) plt.savefig(\"output.png\") plt.show() def visualize_rgb(dataset, keypoint_names, edges, color_map, idx=None): import", "def visualize_both(dataset, keypoint_names, edges, color_map, normalize=False): import random idx =", "visualize_both(dataset, keypoint_names, edges, color_map, normalize=False): import random idx = random.randint(0,", "Select point which consists edge. 
It is a pair or", "pts = point[i].transpose() # (K,N) -> (N,K) # resort coordinate", "1) # remove channnel dimension ax.imshow(img.squeeze()) else: ax = chainercv.visualizations.vis_image(img,", "depth = example[\"depth\"].astype(np.float32) depth_joint = example[\"depth_joint\"] depth_camera = example[\"depth_camera\"] depth_vu,", "in [ax3, ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show()", "W = None, None if ax is None: fig =", "(255, 0, 255), } def vis_image(img, ax=None): \"\"\" extend chainercv.visualizations.vis_image", "one is (y, x) or (z,y,x). pts = point[i] for", ": yx -> xy or zyx -> xyz pts =", "from matplotlib import pyplot as plt from mpl_toolkits.mplot3d import Axes3D", "preprocess(point, ax, img) n_inst = len(point) if color is not", "return_z=True) z_size = example[\"param\"][\"z_size\"] if normalize: depth = normalize_depth(depth, z_com=depth_z.mean(),", "ax=ax1) vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) vis_point(depth_vu, img=depth,", "fig.add_subplot(1, 1, 1) # remove channnel dimension ax.imshow(img.squeeze()) else: ax", "[ax3, ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def", "not None: ax.set_ylim(bottom=H - 1, top=0) return ax def vis_pose(point,", "H, W def vis_point(point, img=None, color=None, ax=None): \"\"\" Visualize points", "is not None: ax.set_ylim(bottom=H - 1, top=0) return ax def", "pose.hand_dataset.geometry_utils import normalize_joint_zyx from pose.hand_dataset.image_utils import normalize_depth # Decimal Code", "= len(point) c = np.asarray(color) / 255. 
if color is", "example\") example = dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(8,", "example = dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(8, 8))", "n_inst = len(point) if color is not None: color =", "= plt.figure(figsize=(5, 10)) ax1 = fig.add_subplot(211) ax3 = fig.add_subplot(212, projection=\"3d\")", "(K,N) and the format of one is (y, x) or", "= pts[[s, t]].transpose() edge = edge[::-1] ax.plot(*edge, c=c) if W", "rgb_joint = example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu,", "None: ax.set_ylim(bottom=H - 1, top=0) return ax def vis_pose(point, indices,", "resort coordinate order: yx -> xy or zyx -> xyz", "customized to our purpose. Base implementation is taken from chainercv.visualizations.vis_image", "import logging logger = logging.getLogger(__name__) import random import chainercv import", "= fig.add_subplot(211) ax4 = fig.add_subplot(212, projection=\"3d\") color = [color_map[k] for", "which consists edge. It is a pair or point (start,", "or (z,y,x). 
pts = point[i] for ((s, t), c) in", "ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_depth(dataset, keypoint_names, edges, color_map, normalize=False):", "import random import chainercv import numpy as np from matplotlib", "ax.set_ylim(bottom=H - 1, top=0) return ax def vis_pose(point, indices, img=None,", "ax.set_xlim(left=0, right=W) if H is not None: ax.set_ylim(bottom=H - 1,", "example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) rgb_joint = normalize_joint_zyx(rgb_joint,", "def vis_image(img, ax=None): \"\"\" extend chainercv.visualizations.vis_image \"\"\" C, H, W", "plt.show() def visualize_rgb(dataset, keypoint_names, edges, color_map, idx=None): import random if", "img=None, point_color=None, edge_color=None, ax=None): ax = vis_point(point, img=img, color=point_color, ax=ax)", "get example\") fig = plt.figure(figsize=(5, 10)) ax2 = fig.add_subplot(211) ax4", "# (K, N) -> (N, K) pts = point[i].transpose() #", "dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(5, 10)) ax1 =", "255), \"MAGENTA\": (255, 0, 255), } def vis_image(img, ax=None): \"\"\"", "color is not None else None for i in range(n_inst):", "8)) ax1 = fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3 = fig.add_subplot(223,", "zyx -> xyz pts = pts[::-1] ax.scatter(*pts, c=c) if W", "vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) for ax in [ax3]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\")", "} def vis_image(img, ax=None): \"\"\" extend chainercv.visualizations.vis_image \"\"\" C, H,", "ax1 = fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3 = fig.add_subplot(223, projection=\"3d\")", "format of one is (y, x), (z,y,x). # (K, N)", "pts = pts[::-1] ax.scatter(*pts, c=c) if W is not None:", "len(point) if color is not None: color = np.asarray(color) /", "/ 255. 
if color is not None else None for", "edges, color_map, normalize=False): import random idx = random.randint(0, len(dataset) -", "if ax is None: fig = plt.figure() ax = fig.add_subplot(1,", "remove channnel dimension ax.imshow(img.squeeze()) else: ax = chainercv.visualizations.vis_image(img, ax) return", "ax is None: fig = plt.figure() if input_point.shape[-1] == 3:", "as plt from mpl_toolkits.mplot3d import Axes3D # NOQA from pose.hand_dataset.geometry_utils", "fig.add_subplot(224, projection=\"3d\") color = [color_map[k] for k in keypoint_names] edge_color", "import normalize_depth # Decimal Code (R,G,B) BASE_COLOR = { \"RED\":", "None: ax.set_xlim(left=0, right=W) if H is not None: ax.set_ylim(bottom=H -", "ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_rgb(dataset,", "depth_camera, z_size) print(example[\"param\"]) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color,", "is None: fig = plt.figure() ax = fig.add_subplot(1, 1, 1)", "preprocess(point, ax, img): input_point = np.asarray(point) if input_point.ndim == 2:", "depth_camera, z_size) rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera =", "= vis_image(img, ax=ax) _, H, W = img.shape return input_point,", "edge = pts[[s, t]].transpose() edge = edge[::-1] ax.plot(*edge, c=c) if", "= vis_point(point, img=img, color=point_color, ax=ax) vis_edge(point, indices, img=img, color=edge_color, ax=ax)", "ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for ax in [ax3, ax4]:", "ax = chainercv.visualizations.vis_image(img, ax) return ax def preprocess(point, ax, img):", "# (K,N) -> (N,K) # resort coordinate order : yx", "{ \"RED\": (255, 0, 0), \"GREEN\": (0, 255, 0), \"BLUE\":", "depth_z = depth_camera.zyx2vu(depth_joint, return_z=True) z_size = example[\"param\"][\"z_size\"] if normalize: depth", "z_size=z_size) depth_joint = 
normalize_joint_zyx(depth_joint, depth_camera, z_size) rgb = example[\"rgb\"] rgb_joint", "= depth_camera.zyx2vu(depth_joint, return_z=True) z_size = example[\"param\"][\"z_size\"] if normalize: depth =", "k in keypoint_names] edge_color = [color_map[s, t] for s, t", "c = np.asarray(color) / 255. if color is not None", "plt.figure(figsize=(5, 10)) ax2 = fig.add_subplot(211) ax4 = fig.add_subplot(212, projection=\"3d\") color", "dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(8, 8)) ax1 =", "rgb_vu = rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color,", "not None: ax = vis_image(img, ax=ax) _, H, W =", "if img is not None: ax = vis_image(img, ax=ax) _,", "edge[::-1] ax.plot(*edge, c=c) if W is not None: ax.set_xlim(left=0, right=W)", "image \"\"\" point, ax, H, W = preprocess(point, ax, img)", "-90) plt.savefig(\"output.png\") plt.show() def visualize_depth(dataset, keypoint_names, edges, color_map, normalize=False): idx", "ax3 = fig.add_subplot(223, projection=\"3d\") ax4 = fig.add_subplot(224, projection=\"3d\") color =", "vis_pose(point, indices, img=None, point_color=None, edge_color=None, ax=None): ax = vis_point(point, img=img,", "edge_color = [color_map[s, t] for s, t in edges] depth", "else None for i in range(n_inst): # note that the", "projection=\"3d\") color = [color_map[k] for k in keypoint_names] edge_color =", "ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_rgb(dataset, keypoint_names,", "color=point_color, ax=ax) vis_edge(point, indices, img=img, color=edge_color, ax=ax) def visualize_both(dataset, keypoint_names,", "= example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu =", "/ 255. 
else: color = [None] * len(indices) for i", "1, projection=\"3d\") else: ax = fig.add_subplot(1, 1, 1) if img", "normalize_joint_zyx(depth_joint, depth_camera, z_size) print(example[\"param\"]) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges,", "[::-1] does resort coordinate order: yx -> xy or zyx", "visualize_rgb(dataset, keypoint_names, edges, color_map, idx=None): import random if idx is", "color_map, normalize=False): idx = random.randint(0, len(dataset) - 1) logger.info(\"get example\")", "255, 0), \"CYAN\": (0, 255, 255), \"MAGENTA\": (255, 0, 255),", "BASE_COLOR = { \"RED\": (255, 0, 0), \"GREEN\": (0, 255,", "point which consists edge. It is a pair or point", "point[i].transpose() # (K,N) -> (N,K) # resort coordinate order :", "zyx -> xyz edge = pts[[s, t]].transpose() edge = edge[::-1]", "ax=ax) def visualize_both(dataset, keypoint_names, edges, color_map, normalize=False): import random idx", "(z,y,x). # (K, N) -> (N, K) pts = point[i].transpose()", "is a pair or point (start, target). 
# Note that", "ax4 = fig.add_subplot(212, projection=\"3d\") color = [color_map[k] for k in", "from pose.hand_dataset.image_utils import normalize_depth # Decimal Code (R,G,B) BASE_COLOR =", "rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu, img=rgb, color=color, ax=ax1)", "chainercv.visualizations.vis_image \"\"\" C, H, W = img.shape if C ==", "def visualize_depth(dataset, keypoint_names, edges, color_map, normalize=False): idx = random.randint(0, len(dataset)", "depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) print(example[\"param\"]) vis_point(depth_vu, img=depth, color=color, ax=ax2)", "chainercv import numpy as np from matplotlib import pyplot as", "ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1) vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges,", "= rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1)", "ax in [ax3]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show()", "= logging.getLogger(__name__) import random import chainercv import numpy as np", "255, 0), \"BLUE\": (0, 0, 255), \"YELLOW\": (255, 255, 0),", "range(n_inst): # note that the shape of `point[i]` is (K,N)", "indices=edges, color=edge_color, ax=ax1) vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3)", "an image, customized to our purpose. Base implementation is taken", "t in edges] depth = example[\"depth\"].astype(np.float32) depth_joint = example[\"depth_joint\"] depth_camera", "None: idx = random.randint(0, len(dataset) - 1) logger.info(\"get example\") example", "point (start, target). 
# Note that [::-1] does resort coordinate", "z_size) rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"]", "[color_map[s, t] for s, t in edges] depth = example[\"depth\"].astype(np.float32)", "normalize=False): import random idx = random.randint(0, len(dataset) - 1) logger.info(\"get", "= preprocess(point, ax, img) n_inst = len(point) if color is", "dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(5, 10)) ax2 =", "(0, 255, 0), \"BLUE\": (0, 0, 255), \"YELLOW\": (255, 255,", "0, 255), \"YELLOW\": (255, 255, 0), \"CYAN\": (0, 255, 255),", "input_point = np.asarray(point) if input_point.ndim == 2: input_point = np.expand_dims(point,", "pair or point (start, target). # Note that [::-1] does", "depth_joint = example[\"depth_joint\"] depth_camera = example[\"depth_camera\"] depth_vu, depth_z = depth_camera.zyx2vu(depth_joint,", "np.asarray(color) / 255. else: color = [None] * len(indices) for", "keypoint_names, edges, color_map, idx=None): import random if idx is None:", "print(example[\"param\"]) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2) vis_point(depth_joint,", "ax, img): input_point = np.asarray(point) if input_point.ndim == 2: input_point", "taken from chainercv.visualizations.vis_image \"\"\" point, ax, H, W = preprocess(point,", "example[\"depth\"].astype(np.float32) depth_joint = example[\"depth_joint\"] depth_camera = example[\"depth_camera\"] depth_vu, depth_z =", "= plt.figure() ax = fig.add_subplot(1, 1, 1) # remove channnel", "not None: ax.set_xlim(left=0, right=W) if H is not None: ax.set_ylim(bottom=H", "rgb_camera, z_size) print(example[\"param\"]) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color,", "ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_rgb(dataset, keypoint_names, edges,", "ax = 
vis_point(point, img=img, color=point_color, ax=ax) vis_edge(point, indices, img=img, color=edge_color,", "logger = logging.getLogger(__name__) import random import chainercv import numpy as", "color=edge_color, ax=ax1) vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) for", "normalize_joint_zyx(depth_joint, depth_camera, z_size) rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera", "is not None: ax = vis_image(img, ax=ax) _, H, W", "target). # Note that [::-1] does resort coordinate order: yx", "W = img.shape return input_point, ax, H, W def vis_point(point,", "edges in an image \"\"\" point, ax, H, W =", "t in edges] rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera", "(K,N) -> (N,K) # resort coordinate order : yx ->", "ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for ax in [ax4]: ax.set_xlabel(\"x\")", "ax=ax3) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2) vis_point(depth_joint,", "to our purpose. 
Base implementation is taken from chainercv.visualizations.vis_image \"\"\"", "-> xy or zyx -> xyz edge = pts[[s, t]].transpose()", "Visualize edges in an image \"\"\" point, ax, H, W", "vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for ax in [ax3, ax4]: ax.set_xlabel(\"x\")", "vis_image(img, ax=ax) _, H, W = img.shape return input_point, ax,", "Code (R,G,B) BASE_COLOR = { \"RED\": (255, 0, 0), \"GREEN\":", "import random idx = random.randint(0, len(dataset) - 1) logger.info(\"get example\")", "H, W = preprocess(point, ax, img) n_inst = len(point) c", "(K, N) -> (N, K) pts = point[i].transpose() # (K,N)", "preprocess(point, ax, img) n_inst = len(point) c = np.asarray(color) /", "ax.imshow(img.squeeze()) else: ax = chainercv.visualizations.vis_image(img, ax) return ax def preprocess(point,", "= fig.add_subplot(212, projection=\"3d\") color = [color_map[k] for k in keypoint_names]", "1) logger.info(\"get example\") example = dataset.get_example(idx) logger.info(\"Done get example\") fig", "255, 255), \"MAGENTA\": (255, 0, 255), } def vis_image(img, ax=None):", "Visualize points in an image, customized to our purpose. Base", "(255, 255, 0), \"CYAN\": (0, 255, 255), \"MAGENTA\": (255, 0,", "implementation is taken from chainercv.visualizations.vis_image \"\"\" point, ax, H, W", "if normalize: depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint,", "np.asarray(point) if input_point.ndim == 2: input_point = np.expand_dims(point, axis=0) H,", "= np.asarray(point) if input_point.ndim == 2: input_point = np.expand_dims(point, axis=0)", "color=edge_color, ax=ax3) for ax in [ax3]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65,", "color): # Select point which consists edge. 
It is a", "# Decimal Code (R,G,B) BASE_COLOR = { \"RED\": (255, 0,", "rgb_camera.zyx2vu(rgb_joint) rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera, z_size) print(example[\"param\"]) vis_point(rgb_vu, img=rgb, color=color,", "logging logger = logging.getLogger(__name__) import random import chainercv import numpy", "img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2) vis_point(depth_joint, color=color, ax=ax4)", "purpose. Base implementation is taken from chainercv.visualizations.vis_image \"\"\" point, ax,", "extend chainercv.visualizations.vis_image \"\"\" C, H, W = img.shape if C", "W = img.shape if C == 1: if ax is", "and the format of one is (y, x), (z,y,x). #", "normalize=False): idx = random.randint(0, len(dataset) - 1) logger.info(\"get example\") example", "np.expand_dims(point, axis=0) H, W = None, None if ax is", "1, top=0) return ax def vis_pose(point, indices, img=None, point_color=None, edge_color=None,", "Axes3D # NOQA from pose.hand_dataset.geometry_utils import normalize_joint_zyx from pose.hand_dataset.image_utils import", "x), (z,y,x). 
# (K, N) -> (N, K) pts =", "Decimal Code (R,G,B) BASE_COLOR = { \"RED\": (255, 0, 0),", "example\") fig = plt.figure(figsize=(5, 10)) ax1 = fig.add_subplot(211) ax3 =", "None: fig = plt.figure() if input_point.shape[-1] == 3: ax =", "s, t in edges] rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"]", "xy or zyx -> xyz pts = pts[::-1] ax.scatter(*pts, c=c)", "example[\"depth_camera\"] depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True) z_size = example[\"param\"][\"z_size\"] if", "random idx = random.randint(0, len(dataset) - 1) logger.info(\"get example\") example", "dimension ax.imshow(img.squeeze()) else: ax = chainercv.visualizations.vis_image(img, ax) return ax def", "255), } def vis_image(img, ax=None): \"\"\" extend chainercv.visualizations.vis_image \"\"\" C,", "indices=edges, color=edge_color, ax=ax2) vis_point(depth_joint, color=color, ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4)", "color is not None: color = np.asarray(color) / 255. else:", "= fig.add_subplot(224, projection=\"3d\") color = [color_map[k] for k in keypoint_names]", "ax in [ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show()", "(R,G,B) BASE_COLOR = { \"RED\": (255, 0, 0), \"GREEN\": (0,", "point_color=None, edge_color=None, ax=None): ax = vis_point(point, img=img, color=point_color, ax=ax) vis_edge(point,", "normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) rgb =", "ax in [ax3, ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\")", "= point[i].transpose() # (K,N) -> (N,K) # resort coordinate order", "[color_map[s, t] for s, t in edges] rgb = example[\"rgb\"]", "= [None] * len(indices) for i in range(n_inst): # note", "edge. It is a pair or point (start, target). 
#", "ax.set_ylim(bottom=H - 1, top=0) return ax def vis_edge(point, indices, img=None,", "vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges,", "else: ax = fig.add_subplot(1, 1, 1) if img is not", "from pose.hand_dataset.geometry_utils import normalize_joint_zyx from pose.hand_dataset.image_utils import normalize_depth # Decimal", "= None, None if ax is None: fig = plt.figure()", "= normalize_joint_zyx(depth_joint, depth_camera, z_size) rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"]", "plt from mpl_toolkits.mplot3d import Axes3D # NOQA from pose.hand_dataset.geometry_utils import", "ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2) vis_point(depth_joint, color=color, ax=ax4) vis_edge(depth_joint, indices=edges,", "keypoint_names, edges, color_map, normalize=False): idx = random.randint(0, len(dataset) - 1)", "== 1: if ax is None: fig = plt.figure() ax", "import normalize_joint_zyx from pose.hand_dataset.image_utils import normalize_depth # Decimal Code (R,G,B)", "ax3 = fig.add_subplot(212, projection=\"3d\") color = [color_map[k] for k in", "for s, t in edges] depth = example[\"depth\"].astype(np.float32) depth_joint =", "None else None for i in range(n_inst): # note that", "z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) print(example[\"param\"]) vis_point(depth_vu, img=depth,", "img is not None: ax = vis_image(img, ax=ax) _, H,", "fig.add_subplot(1, 1, 1, projection=\"3d\") else: ax = fig.add_subplot(1, 1, 1)", "None: ax.set_ylim(bottom=H - 1, top=0) return ax def vis_edge(point, indices,", "pts = point[i] for ((s, t), c) in zip(indices, color):", "ax=ax) vis_edge(point, indices, img=img, color=edge_color, ax=ax) def visualize_both(dataset, keypoint_names, edges,", "for ax in [ax3, ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90)", 
"color=edge_color, ax=ax3) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2)", "1, 1, projection=\"3d\") else: ax = fig.add_subplot(1, 1, 1) if", "fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3 = fig.add_subplot(223, projection=\"3d\") ax4 =", "= { \"RED\": (255, 0, 0), \"GREEN\": (0, 255, 0),", "ax=ax3) for ax in [ax3]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90)", "xy or zyx -> xyz edge = pts[[s, t]].transpose() edge", "rgb_joint = example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) rgb_joint", "= example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) rgb_joint =", "t] for s, t in edges] rgb = example[\"rgb\"] rgb_joint", "ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_depth(dataset, keypoint_names, edges, color_map,", "img=None, color=None, ax=None): \"\"\" Visualize points in an image, customized", "= plt.figure(figsize=(5, 10)) ax2 = fig.add_subplot(211) ax4 = fig.add_subplot(212, projection=\"3d\")", "vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1) vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color,", "consists edge. 
It is a pair or point (start, target).", "== 3: ax = fig.add_subplot(1, 1, 1, projection=\"3d\") else: ax", "return ax def vis_pose(point, indices, img=None, point_color=None, edge_color=None, ax=None): ax", "edge_color = [color_map[s, t] for s, t in edges] rgb", "def preprocess(point, ax, img): input_point = np.asarray(point) if input_point.ndim ==", "depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True) z_size = example[\"param\"][\"z_size\"] if normalize:", "color=None, ax=None): \"\"\" Visualize edges in an image \"\"\" point,", "example\") example = dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(5,", "C == 1: if ax is None: fig = plt.figure()", "ax) return ax def preprocess(point, ax, img): input_point = np.asarray(point)", "depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) rgb = example[\"rgb\"] rgb_joint =", "example\") fig = plt.figure(figsize=(8, 8)) ax1 = fig.add_subplot(221) ax2 =", "get example\") fig = plt.figure(figsize=(8, 8)) ax1 = fig.add_subplot(221) ax2", "color=edge_color, ax=ax4) for ax in [ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65,", "xyz pts = pts[::-1] ax.scatter(*pts, c=c) if W is not", "the shape of `point[i]` is (K,N) and the format of", "points in an image, customized to our purpose. 
Base implementation", "vis_edge(point, indices, img=None, color=None, ax=None): \"\"\" Visualize edges in an", "= img.shape if C == 1: if ax is None:", "edges, color_map, normalize=False): idx = random.randint(0, len(dataset) - 1) logger.info(\"get", "if input_point.shape[-1] == 3: ax = fig.add_subplot(1, 1, 1, projection=\"3d\")", "pts[[s, t]].transpose() edge = edge[::-1] ax.plot(*edge, c=c) if W is", "vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2) vis_point(depth_joint, color=color, ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color,", "xyz edge = pts[[s, t]].transpose() edge = edge[::-1] ax.plot(*edge, c=c)", "(N, K) pts = point[i].transpose() # (K,N) -> (N,K) #", "and the format of one is (y, x) or (z,y,x).", "ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_rgb(dataset, keypoint_names, edges, color_map, idx=None):", "ax=ax) _, H, W = img.shape return input_point, ax, H,", "normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) print(example[\"param\"]) vis_point(depth_vu,", "# NOQA from pose.hand_dataset.geometry_utils import normalize_joint_zyx from pose.hand_dataset.image_utils import normalize_depth", "fig = plt.figure(figsize=(5, 10)) ax2 = fig.add_subplot(211) ax4 = fig.add_subplot(212,", "ax = fig.add_subplot(1, 1, 1) # remove channnel dimension ax.imshow(img.squeeze())", "# Select point which consists edge. 
It is a pair", "img): input_point = np.asarray(point) if input_point.ndim == 2: input_point =", "vis_point(depth_joint, color=color, ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for ax in", "from chainercv.visualizations.vis_image \"\"\" point, ax, H, W = preprocess(point, ax,", "example\") fig = plt.figure(figsize=(5, 10)) ax2 = fig.add_subplot(211) ax4 =", "255), \"YELLOW\": (255, 255, 0), \"CYAN\": (0, 255, 255), \"MAGENTA\":", "right=W) if H is not None: ax.set_ylim(bottom=H - 1, top=0)", "color=edge_color, ax=ax1) vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) vis_point(depth_vu,", "idx is None: idx = random.randint(0, len(dataset) - 1) logger.info(\"get", "import numpy as np from matplotlib import pyplot as plt", "= normalize_joint_zyx(depth_joint, depth_camera, z_size) print(example[\"param\"]) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu,", "input_point.ndim == 2: input_point = np.expand_dims(point, axis=0) H, W =", "z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) rgb = example[\"rgb\"]", "numpy as np from matplotlib import pyplot as plt from", "of `point[i]` is (K,N) and the format of one is", "len(indices) for i in range(n_inst): # note that the shape", "random.randint(0, len(dataset) - 1) logger.info(\"get example\") example = dataset.get_example(idx) logger.info(\"Done", "plt.figure() ax = fig.add_subplot(1, 1, 1) # remove channnel dimension", "the format of one is (y, x), (z,y,x). # (K,", "[ax3]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_depth(dataset,", "It is a pair or point (start, target). # Note", "= example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera, z_size)", "len(point) c = np.asarray(color) / 255. 
if color is not", "in [ax3]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def", "= example[\"depth_camera\"] depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True) z_size = example[\"param\"][\"z_size\"]", "resort coordinate order : yx -> xy or zyx ->", "indices=edges, color=edge_color, ax=ax3) for ax in [ax3]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\")", "vis_edge(point, indices, img=img, color=edge_color, ax=ax) def visualize_both(dataset, keypoint_names, edges, color_map,", "# resort coordinate order : yx -> xy or zyx", "(y, x), (z,y,x). # (K, N) -> (N, K) pts", "point[i] for ((s, t), c) in zip(indices, color): # Select", "idx = random.randint(0, len(dataset) - 1) logger.info(\"get example\") example =", "random if idx is None: idx = random.randint(0, len(dataset) -", "color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) for ax in [ax3]:", "-> xy or zyx -> xyz pts = pts[::-1] ax.scatter(*pts,", "c) in zip(indices, color): # Select point which consists edge.", "Note that [::-1] does resort coordinate order: yx -> xy", "s, t in edges] depth = example[\"depth\"].astype(np.float32) depth_joint = example[\"depth_joint\"]", "= point[i] for ((s, t), c) in zip(indices, color): #", "- 1) logger.info(\"get example\") example = dataset.get_example(idx) logger.info(\"Done get example\")", "np.asarray(color) / 255. if color is not None else None", "mpl_toolkits.mplot3d import Axes3D # NOQA from pose.hand_dataset.geometry_utils import normalize_joint_zyx from", "(0, 0, 255), \"YELLOW\": (255, 255, 0), \"CYAN\": (0, 255,", "color=edge_color, ax=ax2) vis_point(depth_joint, color=color, ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for", "(K,N) and the format of one is (y, x), (z,y,x).", "\"\"\" C, H, W = img.shape if C == 1:", "is not None: color = np.asarray(color) / 255. 
else: color", "_, H, W = img.shape return input_point, ax, H, W", "that [::-1] does resort coordinate order: yx -> xy or", "is (y, x), (z,y,x). # (K, N) -> (N, K)", "fig.add_subplot(211) ax3 = fig.add_subplot(212, projection=\"3d\") color = [color_map[k] for k", "for s, t in edges] rgb = example[\"rgb\"] rgb_joint =", "-> (N,K) # resort coordinate order : yx -> xy", "(N,K) # resort coordinate order : yx -> xy or", "img=None, color=None, ax=None): \"\"\" Visualize edges in an image \"\"\"", "1) if img is not None: ax = vis_image(img, ax=ax)", "= dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(8, 8)) ax1", "for k in keypoint_names] edge_color = [color_map[s, t] for s,", "= dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(5, 10)) ax1", "# note that the shape of `point[i]` is (K,N) and", "normalize_joint_zyx(rgb_joint, rgb_camera, z_size) print(example[\"param\"]) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges,", "print(example[\"param\"]) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1) vis_point(rgb_joint,", "z_size) print(example[\"param\"]) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1)", "indices=edges, color=edge_color, ax=ax3) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color,", "10)) ax1 = fig.add_subplot(211) ax3 = fig.add_subplot(212, projection=\"3d\") color =", "None: fig = plt.figure() ax = fig.add_subplot(1, 1, 1) #", "example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges,", "fig = plt.figure() if input_point.shape[-1] == 3: ax = fig.add_subplot(1,", "example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera, z_size) print(example[\"param\"])", 
"H, W = img.shape if C == 1: if ax", "fig = plt.figure(figsize=(8, 8)) ax1 = fig.add_subplot(221) ax2 = fig.add_subplot(222)", "rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu", "i in range(n_inst): # note that the shape of `point[i]`", "c=c) if W is not None: ax.set_xlim(left=0, right=W) if H", "color = [None] * len(indices) for i in range(n_inst): #", "img.shape if C == 1: if ax is None: fig", "ax.plot(*edge, c=c) if W is not None: ax.set_xlim(left=0, right=W) if", "in zip(indices, color): # Select point which consists edge. It", "color = [color_map[k] for k in keypoint_names] edge_color = [color_map[s,", "len(dataset) - 1) logger.info(\"get example\") example = dataset.get_example(idx) logger.info(\"Done get", "W is not None: ax.set_xlim(left=0, right=W) if H is not", "= edge[::-1] ax.plot(*edge, c=c) if W is not None: ax.set_xlim(left=0,", "vis_point(point, img=None, color=None, ax=None): \"\"\" Visualize points in an image,", "zip(indices, color): # Select point which consists edge. It is", "color_map, idx=None): import random if idx is None: idx =", "logger.info(\"Done get example\") fig = plt.figure(figsize=(5, 10)) ax1 = fig.add_subplot(211)", "= fig.add_subplot(1, 1, 1, projection=\"3d\") else: ax = fig.add_subplot(1, 1,", "def vis_point(point, img=None, color=None, ax=None): \"\"\" Visualize points in an", "= pts[::-1] ax.scatter(*pts, c=c) if W is not None: ax.set_xlim(left=0,", "import pyplot as plt from mpl_toolkits.mplot3d import Axes3D # NOQA", "indices, img=None, point_color=None, edge_color=None, ax=None): ax = vis_point(point, img=img, color=point_color,", "color=color, ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for ax in [ax4]:", "-> (N, K) pts = point[i].transpose() # (K,N) -> (N,K)", "color=edge_color, ax=ax4) for ax in [ax3, ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\")", "255. else: color = [None] * len(indices) for i in", "x) or (z,y,x). 
pts = point[i] for ((s, t), c)", "our purpose. Base implementation is taken from chainercv.visualizations.vis_image \"\"\" point,", "img) n_inst = len(point) if color is not None: color", "(z,y,x). pts = point[i] for ((s, t), c) in zip(indices,", "ax def vis_pose(point, indices, img=None, point_color=None, edge_color=None, ax=None): ax =", "from mpl_toolkits.mplot3d import Axes3D # NOQA from pose.hand_dataset.geometry_utils import normalize_joint_zyx", "projection=\"3d\") else: ax = fig.add_subplot(1, 1, 1) if img is", "if color is not None else None for i in", "\"\"\" Visualize edges in an image \"\"\" point, ax, H,", "plt.savefig(\"output.png\") plt.show() def visualize_depth(dataset, keypoint_names, edges, color_map, normalize=False): idx =", "= chainercv.visualizations.vis_image(img, ax) return ax def preprocess(point, ax, img): input_point", "ax=ax4) for ax in [ax3, ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65,", "or zyx -> xyz edge = pts[[s, t]].transpose() edge =", "edges, color_map, idx=None): import random if idx is None: idx", "for ((s, t), c) in zip(indices, color): # Select point", "= [color_map[k] for k in keypoint_names] edge_color = [color_map[s, t]", "coordinate order : yx -> xy or zyx -> xyz", "color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1) vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint,", "import Axes3D # NOQA from pose.hand_dataset.geometry_utils import normalize_joint_zyx from pose.hand_dataset.image_utils", "in edges] depth = example[\"depth\"].astype(np.float32) depth_joint = example[\"depth_joint\"] depth_camera =", "z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) print(example[\"param\"]) vis_point(depth_vu, img=depth, color=color,", "ax=None): \"\"\" Visualize points in an image, customized to our", "yx -> xy or zyx -> xyz pts = pts[::-1]", "fig.add_subplot(211) ax4 = fig.add_subplot(212, projection=\"3d\") color = 
[color_map[k] for k", "W def vis_point(point, img=None, color=None, ax=None): \"\"\" Visualize points in", "that the shape of `point[i]` is (K,N) and the format", "pts[::-1] ax.scatter(*pts, c=c) if W is not None: ax.set_xlim(left=0, right=W)", "`point[i]` is (K,N) and the format of one is (y,", "W = preprocess(point, ax, img) n_inst = len(point) if color", "vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1) vis_point(rgb_joint, color=color,", "ax2 = fig.add_subplot(211) ax4 = fig.add_subplot(212, projection=\"3d\") color = [color_map[k]", "an image \"\"\" point, ax, H, W = preprocess(point, ax,", "color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2) vis_point(depth_joint, color=color, ax=ax4) vis_edge(depth_joint,", "example[\"depth_joint\"] depth_camera = example[\"depth_camera\"] depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True) z_size", "= example[\"param\"][\"z_size\"] if normalize: depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size) depth_joint", "\"RED\": (255, 0, 0), \"GREEN\": (0, 255, 0), \"BLUE\": (0,", "img.shape return input_point, ax, H, W def vis_point(point, img=None, color=None,", "input_point = np.expand_dims(point, axis=0) H, W = None, None if", "projection=\"3d\") ax4 = fig.add_subplot(224, projection=\"3d\") color = [color_map[k] for k", "color_map, normalize=False): import random idx = random.randint(0, len(dataset) - 1)", "1, 1) if img is not None: ax = vis_image(img,", "a pair or point (start, target). # Note that [::-1]", "ax=ax2) vis_point(depth_joint, color=color, ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for ax", "vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for ax in [ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\")", "get example\") fig = plt.figure(figsize=(5, 10)) ax1 = fig.add_subplot(211) ax3", "image, customized to our purpose. 
Base implementation is taken from", "fig.add_subplot(1, 1, 1) if img is not None: ax =", "ax = fig.add_subplot(1, 1, 1) if img is not None:", "as np from matplotlib import pyplot as plt from mpl_toolkits.mplot3d", "ax=None): ax = vis_point(point, img=img, color=point_color, ax=ax) vis_edge(point, indices, img=img,", "indices, img=None, color=None, ax=None): \"\"\" Visualize edges in an image", "fig = plt.figure() ax = fig.add_subplot(1, 1, 1) # remove", "W = preprocess(point, ax, img) n_inst = len(point) c =", "\"\"\" Visualize points in an image, customized to our purpose.", "z_size = example[\"param\"][\"z_size\"] if normalize: depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size)", "fig = plt.figure(figsize=(5, 10)) ax1 = fig.add_subplot(211) ax3 = fig.add_subplot(212,", "\"YELLOW\": (255, 255, 0), \"CYAN\": (0, 255, 255), \"MAGENTA\": (255,", "point, ax, H, W = preprocess(point, ax, img) n_inst =", "depth_camera = example[\"depth_camera\"] depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True) z_size =", "== 2: input_point = np.expand_dims(point, axis=0) H, W = None,", "def visualize_rgb(dataset, keypoint_names, edges, color_map, idx=None): import random if idx", "1, 1) # remove channnel dimension ax.imshow(img.squeeze()) else: ax =", "axis=0) H, W = None, None if ax is None:", "fig.add_subplot(212, projection=\"3d\") color = [color_map[k] for k in keypoint_names] edge_color", "fig.add_subplot(222) ax3 = fig.add_subplot(223, projection=\"3d\") ax4 = fig.add_subplot(224, projection=\"3d\") color", "- 1, top=0) return ax def vis_edge(point, indices, img=None, color=None,", "edges] depth = example[\"depth\"].astype(np.float32) depth_joint = example[\"depth_joint\"] depth_camera = example[\"depth_camera\"]", "indices=edges, color=edge_color, ax=ax4) for ax in [ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\")", "ax=ax1) vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, 
ax=ax3) for ax", "ax, H, W def vis_point(point, img=None, color=None, ax=None): \"\"\" Visualize", "in an image, customized to our purpose. Base implementation is", "if ax is None: fig = plt.figure() if input_point.shape[-1] ==", "plt.show() def visualize_depth(dataset, keypoint_names, edges, color_map, normalize=False): idx = random.randint(0,", "None for i in range(n_inst): # note that the shape", "= normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) print(example[\"param\"])", "return ax def preprocess(point, ax, img): input_point = np.asarray(point) if", "= random.randint(0, len(dataset) - 1) logger.info(\"get example\") example = dataset.get_example(idx)", "not None: ax.set_ylim(bottom=H - 1, top=0) return ax def vis_edge(point,", "= fig.add_subplot(211) ax3 = fig.add_subplot(212, projection=\"3d\") color = [color_map[k] for", "in keypoint_names] edge_color = [color_map[s, t] for s, t in", "= normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size) rgb", "def vis_edge(point, indices, img=None, color=None, ax=None): \"\"\" Visualize edges in", "logger.info(\"Done get example\") fig = plt.figure(figsize=(8, 8)) ax1 = fig.add_subplot(221)", "\"\"\" extend chainercv.visualizations.vis_image \"\"\" C, H, W = img.shape if", "ax.scatter(*pts, c=c) if W is not None: ax.set_xlim(left=0, right=W) if", "= fig.add_subplot(222) ax3 = fig.add_subplot(223, projection=\"3d\") ax4 = fig.add_subplot(224, projection=\"3d\")", "# remove channnel dimension ax.imshow(img.squeeze()) else: ax = chainercv.visualizations.vis_image(img, ax)", "ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) for ax in [ax3]: ax.set_xlabel(\"x\")", "if W is not None: ax.set_xlim(left=0, right=W) if H is", "format of one is (y, x) or (z,y,x). 
pts =", "input_point, ax, H, W def vis_point(point, img=None, color=None, ax=None): \"\"\"", "if H is not None: ax.set_ylim(bottom=H - 1, top=0) return", "keypoint_names] edge_color = [color_map[s, t] for s, t in edges]", "if idx is None: idx = random.randint(0, len(dataset) - 1)", "ax=None): \"\"\" extend chainercv.visualizations.vis_image \"\"\" C, H, W = img.shape", "= [color_map[s, t] for s, t in edges] depth =", "None: ax = vis_image(img, ax=ax) _, H, W = img.shape", "H, W = img.shape return input_point, ax, H, W def", "is None: idx = random.randint(0, len(dataset) - 1) logger.info(\"get example\")", "(start, target). # Note that [::-1] does resort coordinate order:", "ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu,", "plt.figure(figsize=(8, 8)) ax1 = fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3 =", "0), \"GREEN\": (0, 255, 0), \"BLUE\": (0, 0, 255), \"YELLOW\":", "\"BLUE\": (0, 0, 255), \"YELLOW\": (255, 255, 0), \"CYAN\": (0,", "np from matplotlib import pyplot as plt from mpl_toolkits.mplot3d import", "1: if ax is None: fig = plt.figure() ax =", "yx -> xy or zyx -> xyz edge = pts[[s,", "fig.add_subplot(223, projection=\"3d\") ax4 = fig.add_subplot(224, projection=\"3d\") color = [color_map[k] for", "import random if idx is None: idx = random.randint(0, len(dataset)", "-> xyz pts = pts[::-1] ax.scatter(*pts, c=c) if W is", "ax is None: fig = plt.figure() ax = fig.add_subplot(1, 1,", "logger.info(\"Done get example\") fig = plt.figure(figsize=(5, 10)) ax2 = fig.add_subplot(211)", "img=img, color=edge_color, ax=ax) def visualize_both(dataset, keypoint_names, edges, color_map, normalize=False): import", "= np.asarray(color) / 255. if color is not None else", "is (y, x) or (z,y,x). 
pts = point[i] for ((s,", "normalize_depth # Decimal Code (R,G,B) BASE_COLOR = { \"RED\": (255,", "= example[\"depth_joint\"] depth_camera = example[\"depth_camera\"] depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True)", "example[\"param\"][\"z_size\"] if normalize: depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size) depth_joint =", "logging.getLogger(__name__) import random import chainercv import numpy as np from", "else: ax = chainercv.visualizations.vis_image(img, ax) return ax def preprocess(point, ax,", "ax = vis_image(img, ax=ax) _, H, W = img.shape return", "or zyx -> xyz pts = pts[::-1] ax.scatter(*pts, c=c) if", "top=0) return ax def vis_edge(point, indices, img=None, color=None, ax=None): \"\"\"", "rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu, img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1) vis_point(rgb_joint,", "idx=None): import random if idx is None: idx = random.randint(0,", "not None else None for i in range(n_inst): # note", "2: input_point = np.expand_dims(point, axis=0) H, W = None, None", "\"GREEN\": (0, 255, 0), \"BLUE\": (0, 0, 255), \"YELLOW\": (255,", "in range(n_inst): # note that the shape of `point[i]` is", "= rgb_camera.zyx2vu(rgb_joint) rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera, z_size) print(example[\"param\"]) vis_point(rgb_vu, img=rgb,", "None: color = np.asarray(color) / 255. 
else: color = [None]", "vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) for ax in", "= preprocess(point, ax, img) n_inst = len(point) c = np.asarray(color)", "- 1, top=0) return ax def vis_pose(point, indices, img=None, point_color=None,", "None if ax is None: fig = plt.figure() if input_point.shape[-1]", "1, top=0) return ax def vis_edge(point, indices, img=None, color=None, ax=None):", "ax, H, W = preprocess(point, ax, img) n_inst = len(point)", "[color_map[k] for k in keypoint_names] edge_color = [color_map[s, t] for", "is not None else None for i in range(n_inst): #", "z_size) print(example[\"param\"]) vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2)", "does resort coordinate order: yx -> xy or zyx ->", "in edges] rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera =", "indices=edges, color=edge_color, ax=ax4) for ax in [ax3, ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\")", "if color is not None: color = np.asarray(color) / 255.", "pose.hand_dataset.image_utils import normalize_depth # Decimal Code (R,G,B) BASE_COLOR = {", "* len(indices) for i in range(n_inst): # note that the", "= plt.figure() if input_point.shape[-1] == 3: ax = fig.add_subplot(1, 1,", "= fig.add_subplot(223, projection=\"3d\") ax4 = fig.add_subplot(224, projection=\"3d\") color = [color_map[k]", "0, 0), \"GREEN\": (0, 255, 0), \"BLUE\": (0, 0, 255),", "indices, img=img, color=edge_color, ax=ax) def visualize_both(dataset, keypoint_names, edges, color_map, normalize=False):", "= len(point) if color is not None: color = np.asarray(color)", "img=rgb, color=color, ax=ax1) vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1) vis_point(rgb_joint, color=color, ax=ax3)", "Base implementation is taken from chainercv.visualizations.vis_image \"\"\" point, ax, H,", "edges] rgb = example[\"rgb\"] rgb_joint = example[\"rgb_joint\"] rgb_camera = 
example[\"rgb_camera\"]", "is taken from chainercv.visualizations.vis_image \"\"\" point, ax, H, W =", "NOQA from pose.hand_dataset.geometry_utils import normalize_joint_zyx from pose.hand_dataset.image_utils import normalize_depth #", "if C == 1: if ax is None: fig =", "None, None if ax is None: fig = plt.figure() if", "vis_point(depth_vu, img=depth, color=color, ax=ax2) vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2) vis_point(depth_joint, color=color,", "normalize_joint_zyx from pose.hand_dataset.image_utils import normalize_depth # Decimal Code (R,G,B) BASE_COLOR", "color = np.asarray(color) / 255. else: color = [None] *", "t]].transpose() edge = edge[::-1] ax.plot(*edge, c=c) if W is not", "color=edge_color, ax=ax) def visualize_both(dataset, keypoint_names, edges, color_map, normalize=False): import random", "-> xyz edge = pts[[s, t]].transpose() edge = edge[::-1] ax.plot(*edge,", "example = dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(5, 10))", "ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_depth(dataset, keypoint_names, edges,", "color=color, ax=ax4) vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4) for ax in [ax3,", "N) -> (N, K) pts = point[i].transpose() # (K,N) ->", "example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu, img=rgb, color=color,", "\"CYAN\": (0, 255, 255), \"MAGENTA\": (255, 0, 255), } def", "vis_point(rgb_joint, color=color, ax=ax3) vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3) vis_point(depth_vu, img=depth, color=color,", "edge = edge[::-1] ax.plot(*edge, c=c) if W is not None:", "ax = fig.add_subplot(1, 1, 1, projection=\"3d\") else: ax = fig.add_subplot(1,", "img=img, color=point_color, ax=ax) vis_edge(point, indices, img=img, color=edge_color, ax=ax) def visualize_both(dataset,", "depth = normalize_depth(depth, 
z_com=depth_z.mean(), z_size=z_size) depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size)", "3: ax = fig.add_subplot(1, 1, 1, projection=\"3d\") else: ax =", "C, H, W = img.shape if C == 1: if", "ax def preprocess(point, ax, img): input_point = np.asarray(point) if input_point.ndim", "ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_rgb(dataset, keypoint_names, edges, color_map,", "= fig.add_subplot(1, 1, 1) if img is not None: ax", "= dataset.get_example(idx) logger.info(\"Done get example\") fig = plt.figure(figsize=(5, 10)) ax2", "plt.figure() if input_point.shape[-1] == 3: ax = fig.add_subplot(1, 1, 1,", "n_inst = len(point) c = np.asarray(color) / 255. if color", "is (K,N) and the format of one is (y, x)", "random import chainercv import numpy as np from matplotlib import", "= np.expand_dims(point, axis=0) H, W = None, None if ax", "0, 255), } def vis_image(img, ax=None): \"\"\" extend chainercv.visualizations.vis_image \"\"\"", "(y, x) or (z,y,x). pts = point[i] for ((s, t),", "chainercv.visualizations.vis_image \"\"\" point, ax, H, W = preprocess(point, ax, img)", "of one is (y, x) or (z,y,x). pts = point[i]", "ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\") plt.show() def visualize_depth(dataset, keypoint_names,", "chainercv.visualizations.vis_image(img, ax) return ax def preprocess(point, ax, img): input_point =", "\"\"\" point, ax, H, W = preprocess(point, ax, img) n_inst", "for ax in [ax4]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\")", "logger.info(\"get example\") example = dataset.get_example(idx) logger.info(\"Done get example\") fig =", "one is (y, x), (z,y,x). 
# (K, N) -> (N,", "= example[\"rgb_joint\"] rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) vis_point(rgb_vu, img=rgb,", "ax=None): \"\"\" Visualize edges in an image \"\"\" point, ax,", "vis_point(point, img=img, color=point_color, ax=ax) vis_edge(point, indices, img=img, color=edge_color, ax=ax) def", "rgb_camera = example[\"rgb_camera\"] rgb_vu = rgb_camera.zyx2vu(rgb_joint) rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera,", "for ax in [ax3]: ax.set_xlabel(\"x\") ax.set_ylabel(\"y\") ax.set_zlabel(\"z\") ax.view_init(-65, -90) plt.savefig(\"output.png\")", "H, W = None, None if ax is None: fig", "= plt.figure(figsize=(8, 8)) ax1 = fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3", "ax, img) n_inst = len(point) if color is not None:", "ax4 = fig.add_subplot(224, projection=\"3d\") color = [color_map[k] for k in", "K) pts = point[i].transpose() # (K,N) -> (N,K) # resort", "order: yx -> xy or zyx -> xyz edge =", "H is not None: ax.set_ylim(bottom=H - 1, top=0) return ax", "edge_color=None, ax=None): ax = vis_point(point, img=img, color=point_color, ax=ax) vis_edge(point, indices,", "note that the shape of `point[i]` is (K,N) and the", "import chainercv import numpy as np from matplotlib import pyplot", "= img.shape return input_point, ax, H, W def vis_point(point, img=None,", "not None: color = np.asarray(color) / 255. else: color =", "depth_camera.zyx2vu(depth_joint, return_z=True) z_size = example[\"param\"][\"z_size\"] if normalize: depth = normalize_depth(depth," ]
[ "python3 from flask import Flask app = Flask(__name__) @app.route('/process-email') def", "#!/usr/bin/env python3 from flask import Flask app = Flask(__name__) @app.route('/process-email')", "Flask app = Flask(__name__) @app.route('/process-email') def process_email(): return \"Hello World!\"", "def process_email(): return \"Hello World!\" if __name__ == \"__main__\": app.run()", "flask import Flask app = Flask(__name__) @app.route('/process-email') def process_email(): return", "Flask(__name__) @app.route('/process-email') def process_email(): return \"Hello World!\" if __name__ ==", "from flask import Flask app = Flask(__name__) @app.route('/process-email') def process_email():", "app = Flask(__name__) @app.route('/process-email') def process_email(): return \"Hello World!\" if", "<reponame>ticapix/automated-tasks<gh_stars>0 #!/usr/bin/env python3 from flask import Flask app = Flask(__name__)", "import Flask app = Flask(__name__) @app.route('/process-email') def process_email(): return \"Hello", "@app.route('/process-email') def process_email(): return \"Hello World!\" if __name__ == \"__main__\":", "= Flask(__name__) @app.route('/process-email') def process_email(): return \"Hello World!\" if __name__" ]
[ "m.eval(); return m def get_speaking_detector(e): m = torch.load('../data/speaker/model.e{}.pt'.format(e)) m =", "torch.nn.Sequential( torch.nn.Linear(512, 2) ) # print(len(list(self.features.parameters()))) for p in list(self.features.parameters())[:20]:", "import os import skimage.io from torch.nn import Module import torch.nn", "= resnet18(pretrained=True) self.features = torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier = torch.nn.Sequential( torch.nn.Linear(512, 2)", "os.environ['TORCH_MODEL_ZOO'] = '../data/' VIDTIMIT_PATH = '../data/vidtimit/' skimage.io.use_plugin('pil') class Net(Module): def", "self.classifier(f) return y def get_speaking_detector_final(): m = torch.load('../data/speaker.pt') m =", "from nn.speaker_dataset import Dataset # @UnusedImport os.environ['TORCH_MODEL_ZOO'] = '../data/' VIDTIMIT_PATH", "print(len(list(self.features.parameters()))) for p in list(self.features.parameters())[:20]: p.requires_grad = False def forward(self,", "X = F.softmax(self.basenet(X)) f = self.features(x) f = f.view(f.size(0), -1)", "get_speaking_detector_final(): m = torch.load('../data/speaker.pt') m = m.eval(); return m def", "return m def get_speaking_detector(e): m = torch.load('../data/speaker/model.e{}.pt'.format(e)) m = m.eval();", "2) ) # print(len(list(self.features.parameters()))) for p in list(self.features.parameters())[:20]: p.requires_grad =", "**kw): # X = F.softmax(self.basenet(X)) f = self.features(x) f =", "def __init__(self): super().__init__() resnet = resnet18(pretrained=True) self.features = torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier", "Module import torch.nn from torchvision.models import resnet18 from nn.speaker_dataset import", "= torch.nn.Sequential( torch.nn.Linear(512, 2) ) # print(len(list(self.features.parameters()))) for p in", "for p in list(self.features.parameters())[:20]: p.requires_grad = False def forward(self, x,", "nn.speaker_dataset import Dataset # @UnusedImport 
os.environ['TORCH_MODEL_ZOO'] = '../data/' VIDTIMIT_PATH =", "m = m.eval(); return m def get_speaking_detector(e): m = torch.load('../data/speaker/model.e{}.pt'.format(e))", "@UnusedImport os.environ['TORCH_MODEL_ZOO'] = '../data/' VIDTIMIT_PATH = '../data/vidtimit/' skimage.io.use_plugin('pil') class Net(Module):", "import Dataset # @UnusedImport os.environ['TORCH_MODEL_ZOO'] = '../data/' VIDTIMIT_PATH = '../data/vidtimit/'", "y = self.classifier(f) return y def get_speaking_detector_final(): m = torch.load('../data/speaker.pt')", "self.classifier = torch.nn.Sequential( torch.nn.Linear(512, 2) ) # print(len(list(self.features.parameters()))) for p", "# X = F.softmax(self.basenet(X)) f = self.features(x) f = f.view(f.size(0),", "= torch.load('../data/speaker.pt') m = m.eval(); return m def get_speaking_detector(e): m", "# @UnusedImport os.environ['TORCH_MODEL_ZOO'] = '../data/' VIDTIMIT_PATH = '../data/vidtimit/' skimage.io.use_plugin('pil') class", "Dataset # @UnusedImport os.environ['TORCH_MODEL_ZOO'] = '../data/' VIDTIMIT_PATH = '../data/vidtimit/' skimage.io.use_plugin('pil')", "torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier = torch.nn.Sequential( torch.nn.Linear(512, 2) ) # print(len(list(self.features.parameters()))) for", "-1) y = self.classifier(f) return y def get_speaking_detector_final(): m =", "= f.view(f.size(0), -1) y = self.classifier(f) return y def get_speaking_detector_final():", "skimage.io.use_plugin('pil') class Net(Module): def __init__(self): super().__init__() resnet = resnet18(pretrained=True) self.features", "= self.classifier(f) return y def get_speaking_detector_final(): m = torch.load('../data/speaker.pt') m", "resnet18 from nn.speaker_dataset import Dataset # @UnusedImport os.environ['TORCH_MODEL_ZOO'] = '../data/'", "from torchvision.models import resnet18 from nn.speaker_dataset import Dataset # @UnusedImport", "p.requires_grad = False def forward(self, x, **kw): # X =", "m = torch.load('../data/speaker.pt') m = 
m.eval(); return m def get_speaking_detector(e):", "list(self.features.parameters())[:20]: p.requires_grad = False def forward(self, x, **kw): # X", "import Module import torch.nn from torchvision.models import resnet18 from nn.speaker_dataset", "self.features(x) f = f.view(f.size(0), -1) y = self.classifier(f) return y", "def forward(self, x, **kw): # X = F.softmax(self.basenet(X)) f =", "resnet = resnet18(pretrained=True) self.features = torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier = torch.nn.Sequential( torch.nn.Linear(512,", "'../data/vidtimit/' skimage.io.use_plugin('pil') class Net(Module): def __init__(self): super().__init__() resnet = resnet18(pretrained=True)", ") # print(len(list(self.features.parameters()))) for p in list(self.features.parameters())[:20]: p.requires_grad = False", "m def get_speaking_detector(e): m = torch.load('../data/speaker/model.e{}.pt'.format(e)) m = m.eval(); return", "os import skimage.io from torch.nn import Module import torch.nn from", "in list(self.features.parameters())[:20]: p.requires_grad = False def forward(self, x, **kw): #", "super().__init__() resnet = resnet18(pretrained=True) self.features = torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier = torch.nn.Sequential(", "f = self.features(x) f = f.view(f.size(0), -1) y = self.classifier(f)", "skimage.io from torch.nn import Module import torch.nn from torchvision.models import", "f = f.view(f.size(0), -1) y = self.classifier(f) return y def", "__init__(self): super().__init__() resnet = resnet18(pretrained=True) self.features = torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier =", "'../data/' VIDTIMIT_PATH = '../data/vidtimit/' skimage.io.use_plugin('pil') class Net(Module): def __init__(self): super().__init__()", "= '../data/' VIDTIMIT_PATH = '../data/vidtimit/' skimage.io.use_plugin('pil') class Net(Module): def __init__(self):", "self.features = torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier 
= torch.nn.Sequential( torch.nn.Linear(512, 2) ) #", "False def forward(self, x, **kw): # X = F.softmax(self.basenet(X)) f", "return y def get_speaking_detector_final(): m = torch.load('../data/speaker.pt') m = m.eval();", "= False def forward(self, x, **kw): # X = F.softmax(self.basenet(X))", "# print(len(list(self.features.parameters()))) for p in list(self.features.parameters())[:20]: p.requires_grad = False def", "VIDTIMIT_PATH = '../data/vidtimit/' skimage.io.use_plugin('pil') class Net(Module): def __init__(self): super().__init__() resnet", "def get_speaking_detector_final(): m = torch.load('../data/speaker.pt') m = m.eval(); return m", "= self.features(x) f = f.view(f.size(0), -1) y = self.classifier(f) return", "def get_speaking_detector(e): m = torch.load('../data/speaker/model.e{}.pt'.format(e)) m = m.eval(); return m", "torch.nn.Linear(512, 2) ) # print(len(list(self.features.parameters()))) for p in list(self.features.parameters())[:20]: p.requires_grad", "resnet18(pretrained=True) self.features = torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier = torch.nn.Sequential( torch.nn.Linear(512, 2) )", "torch.nn import Module import torch.nn from torchvision.models import resnet18 from", "x, **kw): # X = F.softmax(self.basenet(X)) f = self.features(x) f", "= m.eval(); return m def get_speaking_detector(e): m = torch.load('../data/speaker/model.e{}.pt'.format(e)) m", "= F.softmax(self.basenet(X)) f = self.features(x) f = f.view(f.size(0), -1) y", "torch.nn from torchvision.models import resnet18 from nn.speaker_dataset import Dataset #", "class Net(Module): def __init__(self): super().__init__() resnet = resnet18(pretrained=True) self.features =", "import torch.nn from torchvision.models import resnet18 from nn.speaker_dataset import Dataset", "= '../data/vidtimit/' skimage.io.use_plugin('pil') class Net(Module): def __init__(self): super().__init__() resnet =", "forward(self, x, **kw): # X = F.softmax(self.basenet(X)) f = self.features(x)", 
"torchvision.models import resnet18 from nn.speaker_dataset import Dataset # @UnusedImport os.environ['TORCH_MODEL_ZOO']", "Net(Module): def __init__(self): super().__init__() resnet = resnet18(pretrained=True) self.features = torch.nn.Sequential(*list(resnet.children())[:-1])", "f.view(f.size(0), -1) y = self.classifier(f) return y def get_speaking_detector_final(): m", "torch.load('../data/speaker.pt') m = m.eval(); return m def get_speaking_detector(e): m =", "= torch.nn.Sequential(*list(resnet.children())[:-1]) self.classifier = torch.nn.Sequential( torch.nn.Linear(512, 2) ) # print(len(list(self.features.parameters())))", "import skimage.io from torch.nn import Module import torch.nn from torchvision.models", "from torch.nn import Module import torch.nn from torchvision.models import resnet18", "import resnet18 from nn.speaker_dataset import Dataset # @UnusedImport os.environ['TORCH_MODEL_ZOO'] =", "p in list(self.features.parameters())[:20]: p.requires_grad = False def forward(self, x, **kw):", "y def get_speaking_detector_final(): m = torch.load('../data/speaker.pt') m = m.eval(); return", "F.softmax(self.basenet(X)) f = self.features(x) f = f.view(f.size(0), -1) y =" ]
[ "obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0", "+ d + e self.assertEqual(1 + 2 + 3 +", "LLC # # Licensed under the Apache License, Version 2.0", "from tensorflow_model_analysis import util class UtilTest(tf.test.TestCase): def testKwargsOnly(self): @util.kwargs_only def", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "distributed under the License is distributed on an \"AS IS\"", "e = 1000 return a + b + c +", "100 + 1000, fn(a=1, b=2, c=3, e=None)) with self.assertRaisesRegexp(TypeError, 'keyword-arguments", "the specific language governing permissions and # limitations under the", "with self.assertRaisesRegexp(TypeError, 'with c specified'): fn(a=1, b=2, e=5) # pylint:", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "b + c + d + e self.assertEqual(1 + 2", "# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 #", "b=2, c=3, e=None)) with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'): fn(1, 2, 3)", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "b=2, c=3)) self.assertEqual(1 + 2 + 3 + 100 +", "except in compliance with the License. 
# You may obtain", "# pylint: disable=no-value-for-parameter with self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'): fn(a=1, b=2,", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "+ e self.assertEqual(1 + 2 + 3 + 100 +", "e=5): if d is None: d = 100 if e", "2, 3) with self.assertRaisesRegexp(TypeError, 'with c specified'): fn(a=1, b=2, e=5)", "copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # #", "Google LLC # # Licensed under the Apache License, Version", "fn(a=1, b=2, c=3, e=None)) with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'): fn(1, 2,", "tensorflow as tf from tensorflow_model_analysis import util class UtilTest(tf.test.TestCase): def", "writing, software # distributed under the License is distributed on", "in writing, software # distributed under the License is distributed", "you may not use this file except in compliance with", "+ 2 + 3 + 100 + 1000, fn(a=1, b=2,", "permissions and # limitations under the License. \"\"\"Simple tests for", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "+ 100 + 1000, fn(a=1, b=2, c=3, e=None)) with self.assertRaisesRegexp(TypeError,", "kwargs'): fn(a=1, b=2, c=3, f=11) # pylint: disable=unexpected-keyword-arg if __name__", "Copyright 2018 Google LLC # # Licensed under the Apache", "'with c specified'): fn(a=1, b=2, e=5) # pylint: disable=no-value-for-parameter with", "use this file except in compliance with the License. #", "d + e self.assertEqual(1 + 2 + 3 + 100", "= 100 if e is None: e = 1000 return", "d is None: d = 100 if e is None:", "for util.\"\"\" from __future__ import absolute_import from __future__ import division", "tf from tensorflow_model_analysis import util class UtilTest(tf.test.TestCase): def testKwargsOnly(self): @util.kwargs_only", "CONDITIONS OF ANY KIND, either express or implied. # See", "the License. 
\"\"\"Simple tests for util.\"\"\" from __future__ import absolute_import", "100 + 5, fn(a=1, b=2, c=3)) self.assertEqual(1 + 2 +", "util class UtilTest(tf.test.TestCase): def testKwargsOnly(self): @util.kwargs_only def fn(a, b, c,", "a + b + c + d + e self.assertEqual(1", "fn(a=1, b=2, c=3)) self.assertEqual(1 + 2 + 3 + 100", "def fn(a, b, c, d=None, e=5): if d is None:", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "c specified'): fn(a=1, b=2, e=5) # pylint: disable=no-value-for-parameter with self.assertRaisesRegexp(TypeError,", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "'with extraneous kwargs'): fn(a=1, b=2, c=3, f=11) # pylint: disable=unexpected-keyword-arg", "of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless", "+ 1000, fn(a=1, b=2, c=3, e=None)) with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'):", "'keyword-arguments only'): fn(1, 2, 3) with self.assertRaisesRegexp(TypeError, 'with c specified'):", "# You may obtain a copy of the License at", "import print_function import tensorflow as tf from tensorflow_model_analysis import util", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "__future__ import print_function import tensorflow as tf from tensorflow_model_analysis import", "@util.kwargs_only def fn(a, b, c, d=None, e=5): if d is", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "tests for util.\"\"\" from __future__ import absolute_import from __future__ import", "<reponame>mdreves/model-analysis # Copyright 2018 Google LLC # # Licensed under", "c=3, f=11) # pylint: disable=unexpected-keyword-arg if __name__ == '__main__': tf.test.main()", "c + d + e self.assertEqual(1 + 2 + 3", "UtilTest(tf.test.TestCase): def testKwargsOnly(self): @util.kwargs_only def fn(a, b, c, d=None, e=5):", "testKwargsOnly(self): @util.kwargs_only def fn(a, b, c, d=None, e=5): if d", "is None: e = 1000 return a + b +", "disable=no-value-for-parameter with self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'): fn(a=1, b=2, c=3, f=11)", "d=None, e=5): if d is None: d = 100 if", "the License for the specific language governing permissions and #", "License. \"\"\"Simple tests for util.\"\"\" from __future__ import absolute_import from", "(the \"License\"); # you may not use this file except", "if d is None: d = 100 if e is", "b=2, e=5) # pylint: disable=no-value-for-parameter with self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'):", "Apache License, Version 2.0 (the \"License\"); # you may not", "b=2, c=3, f=11) # pylint: disable=unexpected-keyword-arg if __name__ == '__main__':", "# you may not use this file except in compliance", "either express or implied. # See the License for the", "License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "with self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'): fn(a=1, b=2, c=3, f=11) #", "+ 5, fn(a=1, b=2, c=3)) self.assertEqual(1 + 2 + 3", "if e is None: e = 1000 return a +", "the License is distributed on an \"AS IS\" BASIS, #", "from __future__ import absolute_import from __future__ import division from __future__", "3) with self.assertRaisesRegexp(TypeError, 'with c specified'): fn(a=1, b=2, e=5) #", "in compliance with the License. # You may obtain a", "limitations under the License. \"\"\"Simple tests for util.\"\"\" from __future__", "e self.assertEqual(1 + 2 + 3 + 100 + 5,", "= 1000 return a + b + c + d", "software # distributed under the License is distributed on an", "self.assertEqual(1 + 2 + 3 + 100 + 1000, fn(a=1,", "+ 2 + 3 + 100 + 5, fn(a=1, b=2,", "1000 return a + b + c + d +", "e=None)) with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'): fn(1, 2, 3) with self.assertRaisesRegexp(TypeError,", "c=3)) self.assertEqual(1 + 2 + 3 + 100 + 1000,", "self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'): fn(a=1, b=2, c=3, f=11) # pylint:", "None: e = 1000 return a + b + c", "# # Unless required by applicable law or agreed to", "class UtilTest(tf.test.TestCase): def testKwargsOnly(self): @util.kwargs_only def fn(a, b, c, d=None,", "b, c, d=None, e=5): if d is None: d =", "as tf from tensorflow_model_analysis import util class UtilTest(tf.test.TestCase): def testKwargsOnly(self):", "and # limitations under the License. \"\"\"Simple tests for util.\"\"\"", "2 + 3 + 100 + 1000, fn(a=1, b=2, c=3,", "fn(1, 2, 3) with self.assertRaisesRegexp(TypeError, 'with c specified'): fn(a=1, b=2,", "under the License. 
\"\"\"Simple tests for util.\"\"\" from __future__ import", "fn(a, b, c, d=None, e=5): if d is None: d", "from __future__ import division from __future__ import print_function import tensorflow", "Version 2.0 (the \"License\"); # you may not use this", "+ 100 + 5, fn(a=1, b=2, c=3)) self.assertEqual(1 + 2", "+ c + d + e self.assertEqual(1 + 2 +", "__future__ import division from __future__ import print_function import tensorflow as", "law or agreed to in writing, software # distributed under", "tensorflow_model_analysis import util class UtilTest(tf.test.TestCase): def testKwargsOnly(self): @util.kwargs_only def fn(a,", "absolute_import from __future__ import division from __future__ import print_function import", "+ 3 + 100 + 5, fn(a=1, b=2, c=3)) self.assertEqual(1", "fn(a=1, b=2, c=3, f=11) # pylint: disable=unexpected-keyword-arg if __name__ ==", "from __future__ import print_function import tensorflow as tf from tensorflow_model_analysis", "__future__ import absolute_import from __future__ import division from __future__ import", "# Copyright 2018 Google LLC # # Licensed under the", "import util class UtilTest(tf.test.TestCase): def testKwargsOnly(self): @util.kwargs_only def fn(a, b,", "pylint: disable=no-value-for-parameter with self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'): fn(a=1, b=2, c=3,", "def testKwargsOnly(self): @util.kwargs_only def fn(a, b, c, d=None, e=5): if", "implied. 
# See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "None: d = 100 if e is None: e =", "\"License\"); # you may not use this file except in", "fn(a=1, b=2, e=5) # pylint: disable=no-value-for-parameter with self.assertRaisesRegexp(TypeError, 'with extraneous", "3 + 100 + 5, fn(a=1, b=2, c=3)) self.assertEqual(1 +", "util.\"\"\" from __future__ import absolute_import from __future__ import division from", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "d = 100 if e is None: e = 1000", "# # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "e is None: e = 1000 return a + b", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "2 + 3 + 100 + 5, fn(a=1, b=2, c=3))", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "c=3, e=None)) with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'): fn(1, 2, 3) with", "is None: d = 100 if e is None: e", "print_function import tensorflow as tf from tensorflow_model_analysis import util class", "specified'): fn(a=1, b=2, e=5) # pylint: disable=no-value-for-parameter with self.assertRaisesRegexp(TypeError, 'with", "the License. 
# You may obtain a copy of the", "for the specific language governing permissions and # limitations under", "+ 3 + 100 + 1000, fn(a=1, b=2, c=3, e=None))", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "self.assertRaisesRegexp(TypeError, 'keyword-arguments only'): fn(1, 2, 3) with self.assertRaisesRegexp(TypeError, 'with c", "to in writing, software # distributed under the License is", "with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'): fn(1, 2, 3) with self.assertRaisesRegexp(TypeError, 'with", "division from __future__ import print_function import tensorflow as tf from", "at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "language governing permissions and # limitations under the License. \"\"\"Simple", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "extraneous kwargs'): fn(a=1, b=2, c=3, f=11) # pylint: disable=unexpected-keyword-arg if", "You may obtain a copy of the License at #", "import division from __future__ import print_function import tensorflow as tf", "self.assertRaisesRegexp(TypeError, 'with c specified'): fn(a=1, b=2, e=5) # pylint: disable=no-value-for-parameter", "only'): fn(1, 2, 3) with self.assertRaisesRegexp(TypeError, 'with c specified'): fn(a=1,", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "3 + 100 + 1000, fn(a=1, b=2, c=3, e=None)) with", "required by applicable law or agreed to in writing, software", "100 if e is None: e = 1000 return a", "5, fn(a=1, b=2, c=3)) self.assertEqual(1 + 2 + 3 +", "import tensorflow as tf from tensorflow_model_analysis import util class UtilTest(tf.test.TestCase):", "+ b + c + d + e self.assertEqual(1 +", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "with the License. 
# You may obtain a copy of", "c, d=None, e=5): if d is None: d = 100", "this file except in compliance with the License. # You", "1000, fn(a=1, b=2, c=3, e=None)) with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'): fn(1,", "e=5) # pylint: disable=no-value-for-parameter with self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'): fn(a=1,", "the Apache License, Version 2.0 (the \"License\"); # you may", "governing permissions and # limitations under the License. \"\"\"Simple tests", "return a + b + c + d + e", "import absolute_import from __future__ import division from __future__ import print_function", "2018 Google LLC # # Licensed under the Apache License,", "\"\"\"Simple tests for util.\"\"\" from __future__ import absolute_import from __future__", "self.assertEqual(1 + 2 + 3 + 100 + 5, fn(a=1,", "https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "# limitations under the License. \"\"\"Simple tests for util.\"\"\" from" ]
[ "None: assert filter_size_min % 2 != 0, \"Filter size must", "def __call__(self, *args, **kwargs): return NotImplementedError def __repr__(self) -> str:", "self, filter_size_min: int = 3, filter_size_max: int = 7, alpha:", "__repr__(self) -> str: return ( f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min}, \" f\"filter_size_max={self.filter_size_max}, \"", "args = parser.parse_args() transformer = Dilate() if args.operation == \"dilate\"", "filter_sizes = [] filter_probs = [] for k in range(n):", "float ) -> Tuple[List[int], Union[List[float], np.ndarray]]: n = (filter_size_max -", "alpha, beta ) @staticmethod def _create_filter_distribution( filter_size_min: int, filter_size_max: int,", "int, alpha: float, beta: float ) -> Tuple[List[int], Union[List[float], np.ndarray]]:", "filter_size_max % 2 != 0, \"Filter size must be odd\"", "= filter_size_min self.filter_size_max = filter_size_max self.alpha = alpha self.beta =", "*args, **kwargs): return NotImplementedError def __repr__(self) -> str: return (", ") @staticmethod def _create_filter_distribution( filter_size_min: int, filter_size_max: int, alpha: float,", "= 1, beta: float = 3, ) -> None: super().__init__(filter_size_min,", "% 2 != 0, \"Filter size must be odd\" assert", "Tuple, Union import numpy as np import scipy.special from PIL", "* scipy.special.beta(alpha + k, n - k + beta) )", "self.filter_size_min = filter_size_min self.filter_size_max = filter_size_max self.alpha = alpha self.beta", "filter_size_max: int = 5, alpha: float = 1, beta: float", "5, alpha: float = 1, beta: float = 3, )", "= [] filter_probs = [] for k in range(n): filter_sizes.append(filter_size_min", "Dilate(RandomBetaMorphology): def __init__( self, filter_size_min: int = 3, filter_size_max: int", "/ np_filter_probs.sum() return filter_sizes, np_filter_probs def sample_filter_size(self): filter_size = np.random.choice(self.filter_sizes,", "return img.filter(ImageFilter.MinFilter(filter_size)) if __name__ 
== \"__main__\": import argparse from PIL", "int = 7, alpha: float = 1, beta: float =", "def __repr__(self) -> str: return ( f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min}, \" f\"filter_size_max={self.filter_size_max},", "parser.add_argument(\"images\", type=argparse.FileType(\"rb\"), nargs=\"+\") args = parser.parse_args() transformer = Dilate() if", "nargs=\"+\") args = parser.parse_args() transformer = Dilate() if args.operation ==", "w, h = x.size z = Image.new(\"L\", (w, 2 *", "odd\" assert filter_size_max % 2 != 0, \"Filter size must", "beta: float = 3, ) -> None: super().__init__(filter_size_min, filter_size_max, alpha,", "= 5, alpha: float = 1, beta: float = 3,", "parser.parse_args() transformer = Dilate() if args.operation == \"dilate\" else Erode()", "else Erode() for f in args.images: x = Image.open(f, \"r\").convert(\"L\")", "be odd\" assert filter_size_max % 2 != 0, \"Filter size", "h = x.size z = Image.new(\"L\", (w, 2 * h))", "int = 3, filter_size_max: int = 7, alpha: float =", "= 3, filter_size_max: int = 7, alpha: float = 1,", "= [] for k in range(n): filter_sizes.append(filter_size_min + 2 *", "1, beta: float = 3, ) -> None: super().__init__(filter_size_min, filter_size_max,", "(0, 0)) z.paste(y, (0, h)) z = z.resize(size=(w // 2,", "__init__( self, filter_size_min: int, filter_size_max: int, alpha: float, beta: float", "filter_size_min self.filter_size_max = filter_size_max self.alpha = alpha self.beta = beta", "return NotImplementedError def __repr__(self) -> str: return ( f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min},", "alpha, beta) def __call__(self, img: Image) -> Image: filter_size =", "if args.operation == \"dilate\" else Erode() for f in args.images:", "dtype=np.float32) np_filter_probs = filter_probs / np_filter_probs.sum() return filter_sizes, np_filter_probs def", "3, filter_size_max: int = 7, alpha: float = 1, beta:", "\"erode\"), default=\"dilate\") 
parser.add_argument(\"images\", type=argparse.FileType(\"rb\"), nargs=\"+\") args = parser.parse_args() transformer =", "sample_filter_size(self): filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs) return filter_size def __call__(self, *args,", "Image) -> Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MaxFilter(filter_size)) class Erode(RandomBetaMorphology):", "int, filter_size_max: int, alpha: float, beta: float ) -> Tuple[List[int],", "k, n - k + beta) ) np_filter_probs = np.asarray(filter_probs,", "beta: float ) -> None: assert filter_size_min % 2 !=", "type=argparse.FileType(\"rb\"), nargs=\"+\") args = parser.parse_args() transformer = Dilate() if args.operation", "size must be odd\" assert filter_size_max % 2 != 0,", "Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MinFilter(filter_size)) if __name__ == \"__main__\":", "n - k + beta) ) np_filter_probs = np.asarray(filter_probs, dtype=np.float32)", "7, alpha: float = 1, beta: float = 3, )", "!= 0, \"Filter size must be odd\" assert filter_size_max %", "def sample_filter_size(self): filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs) return filter_size def __call__(self,", "\"Filter size must be odd\" self.filter_size_min = filter_size_min self.filter_size_max =", "k in range(n): filter_sizes.append(filter_size_min + 2 * k) filter_probs.append( scipy.special.comb(n,", "filter_size def __call__(self, *args, **kwargs): return NotImplementedError def __repr__(self) ->", "super().__init__(filter_size_min, filter_size_max, alpha, beta) def __call__(self, img: Image) -> Image:", "float, beta: float ) -> Tuple[List[int], Union[List[float], np.ndarray]]: n =", "filter_size_min % 2 != 0, \"Filter size must be odd\"", "np.asarray(filter_probs, dtype=np.float32) np_filter_probs = filter_probs / np_filter_probs.sum() return filter_sizes, np_filter_probs", "filter_size_min: int = 3, filter_size_max: int = 7, alpha: float", 
"transformer = Dilate() if args.operation == \"dilate\" else Erode() for", "import argparse from PIL import ImageOps parser = argparse.ArgumentParser() parser.add_argument(\"--operation\",", "filter_probs.append( scipy.special.comb(n, k) * scipy.special.beta(alpha + k, n - k", "y = transformer(x) w, h = x.size z = Image.new(\"L\",", "filter_size_max: int, alpha: float, beta: float ) -> None: assert", "2 * k) filter_probs.append( scipy.special.comb(n, k) * scipy.special.beta(alpha + k,", "np_filter_probs def sample_filter_size(self): filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs) return filter_size def", "np_filter_probs.sum() return filter_sizes, np_filter_probs def sample_filter_size(self): filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs)", "class Erode(RandomBetaMorphology): def __init__( self, filter_size_min: int = 3, filter_size_max:", "n < 2: return [filter_size_min], np.asarray([1.0], dtype=np.float32) filter_sizes = []", "img.filter(ImageFilter.MaxFilter(filter_size)) class Erode(RandomBetaMorphology): def __init__( self, filter_size_min: int = 3,", "filter_size_min: int = 3, filter_size_max: int = 5, alpha: float", "self.beta = beta self.filter_sizes, self.filter_probs = self._create_filter_distribution( filter_size_min, filter_size_max, alpha,", "!= 0, \"Filter size must be odd\" self.filter_size_min = filter_size_min", "self.filter_sizes, self.filter_probs = self._create_filter_distribution( filter_size_min, filter_size_max, alpha, beta ) @staticmethod", "beta: float ) -> Tuple[List[int], Union[List[float], np.ndarray]]: n = (filter_size_max", "Erode(RandomBetaMorphology): def __init__( self, filter_size_min: int = 3, filter_size_max: int", "= argparse.ArgumentParser() parser.add_argument(\"--operation\", choices=(\"dilate\", \"erode\"), default=\"dilate\") parser.add_argument(\"images\", type=argparse.FileType(\"rb\"), nargs=\"+\") args", "+ beta) ) np_filter_probs = np.asarray(filter_probs, 
dtype=np.float32) np_filter_probs = filter_probs", "__call__(self, *args, **kwargs): return NotImplementedError def __repr__(self) -> str: return", "img.filter(ImageFilter.MinFilter(filter_size)) if __name__ == \"__main__\": import argparse from PIL import", ") -> Tuple[List[int], Union[List[float], np.ndarray]]: n = (filter_size_max - filter_size_min)", ") class Dilate(RandomBetaMorphology): def __init__( self, filter_size_min: int = 3,", "args.images: x = Image.open(f, \"r\").convert(\"L\") x = ImageOps.invert(x) y =", "= ImageOps.invert(x) y = transformer(x) w, h = x.size z", "__name__ == \"__main__\": import argparse from PIL import ImageOps parser", "argparse from PIL import ImageOps parser = argparse.ArgumentParser() parser.add_argument(\"--operation\", choices=(\"dilate\",", "img: Image) -> Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MinFilter(filter_size)) if", "z.paste(y, (0, h)) z = z.resize(size=(w // 2, h), resample=Image.BICUBIC)", "( f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min}, \" f\"filter_size_max={self.filter_size_max}, \" f\"alpha={self.alpha}, beta={self.beta})\" ) class", "x.size z = Image.new(\"L\", (w, 2 * h)) z.paste(x, (0,", "assert filter_size_min % 2 != 0, \"Filter size must be", "k + beta) ) np_filter_probs = np.asarray(filter_probs, dtype=np.float32) np_filter_probs =", "scipy.special.comb(n, k) * scipy.special.beta(alpha + k, n - k +", "RandomBetaMorphology: def __init__( self, filter_size_min: int, filter_size_max: int, alpha: float,", "np_filter_probs = np.asarray(filter_probs, dtype=np.float32) np_filter_probs = filter_probs / np_filter_probs.sum() return", "NotImplementedError def __repr__(self) -> str: return ( f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min}, \"", "(0, h)) z = z.resize(size=(w // 2, h), resample=Image.BICUBIC) z.show()", "def __call__(self, img: Image) -> Image: filter_size = self.sample_filter_size() return", 
"must be odd\" assert filter_size_max % 2 != 0, \"Filter", "scipy.special.beta(alpha + k, n - k + beta) ) np_filter_probs", "= 3, ) -> None: super().__init__(filter_size_min, filter_size_max, alpha, beta) def", "* h)) z.paste(x, (0, 0)) z.paste(y, (0, h)) z =", "f\"filter_size_max={self.filter_size_max}, \" f\"alpha={self.alpha}, beta={self.beta})\" ) class Dilate(RandomBetaMorphology): def __init__( self,", "Union import numpy as np import scipy.special from PIL import", ") np_filter_probs = np.asarray(filter_probs, dtype=np.float32) np_filter_probs = filter_probs / np_filter_probs.sum()", "(filter_size_max - filter_size_min) // 2 + 1 if n <", "class Dilate(RandomBetaMorphology): def __init__( self, filter_size_min: int = 3, filter_size_max:", "ImageFilter class RandomBetaMorphology: def __init__( self, filter_size_min: int, filter_size_max: int,", "= (filter_size_max - filter_size_min) // 2 + 1 if n", "// 2 + 1 if n < 2: return [filter_size_min],", "alpha self.beta = beta self.filter_sizes, self.filter_probs = self._create_filter_distribution( filter_size_min, filter_size_max,", "_create_filter_distribution( filter_size_min: int, filter_size_max: int, alpha: float, beta: float )", "import numpy as np import scipy.special from PIL import Image,", "scipy.special from PIL import Image, ImageFilter class RandomBetaMorphology: def __init__(", "self.filter_probs = self._create_filter_distribution( filter_size_min, filter_size_max, alpha, beta ) @staticmethod def", "+ 1 if n < 2: return [filter_size_min], np.asarray([1.0], dtype=np.float32)", ") -> None: super().__init__(filter_size_min, filter_size_max, alpha, beta) def __call__(self, img:", "z = Image.new(\"L\", (w, 2 * h)) z.paste(x, (0, 0))", "if __name__ == \"__main__\": import argparse from PIL import ImageOps", "filter_size_max self.alpha = alpha self.beta = beta self.filter_sizes, self.filter_probs =", "np.ndarray]]: n = (filter_size_max - filter_size_min) // 2 + 1", "= beta self.filter_sizes, self.filter_probs 
= self._create_filter_distribution( filter_size_min, filter_size_max, alpha, beta", "1 if n < 2: return [filter_size_min], np.asarray([1.0], dtype=np.float32) filter_sizes", "if n < 2: return [filter_size_min], np.asarray([1.0], dtype=np.float32) filter_sizes =", ") -> None: assert filter_size_min % 2 != 0, \"Filter", "filter_size_max, alpha, beta) def __call__(self, img: Image) -> Image: filter_size", "np.random.choice(self.filter_sizes, p=self.filter_probs) return filter_size def __call__(self, *args, **kwargs): return NotImplementedError", "@staticmethod def _create_filter_distribution( filter_size_min: int, filter_size_max: int, alpha: float, beta:", "= filter_probs / np_filter_probs.sum() return filter_sizes, np_filter_probs def sample_filter_size(self): filter_size", "return filter_sizes, np_filter_probs def sample_filter_size(self): filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs) return", "2 + 1 if n < 2: return [filter_size_min], np.asarray([1.0],", "= 7, alpha: float = 1, beta: float = 3,", "__init__( self, filter_size_min: int = 3, filter_size_max: int = 5,", "self, filter_size_min: int = 3, filter_size_max: int = 5, alpha:", "filter_size = self.sample_filter_size() return img.filter(ImageFilter.MinFilter(filter_size)) if __name__ == \"__main__\": import", "argparse.ArgumentParser() parser.add_argument(\"--operation\", choices=(\"dilate\", \"erode\"), default=\"dilate\") parser.add_argument(\"images\", type=argparse.FileType(\"rb\"), nargs=\"+\") args =", "self.sample_filter_size() return img.filter(ImageFilter.MinFilter(filter_size)) if __name__ == \"__main__\": import argparse from", "filter_size_min: int, filter_size_max: int, alpha: float, beta: float ) ->", "be odd\" self.filter_size_min = filter_size_min self.filter_size_max = filter_size_max self.alpha =", "return ( f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min}, \" f\"filter_size_max={self.filter_size_max}, \" f\"alpha={self.alpha}, 
beta={self.beta})\" )", "f\"filter_size_min={self.filter_size_min}, \" f\"filter_size_max={self.filter_size_max}, \" f\"alpha={self.alpha}, beta={self.beta})\" ) class Dilate(RandomBetaMorphology): def", "== \"__main__\": import argparse from PIL import ImageOps parser =", "float ) -> None: assert filter_size_min % 2 != 0,", "default=\"dilate\") parser.add_argument(\"images\", type=argparse.FileType(\"rb\"), nargs=\"+\") args = parser.parse_args() transformer = Dilate()", "range(n): filter_sizes.append(filter_size_min + 2 * k) filter_probs.append( scipy.special.comb(n, k) *", "[] filter_probs = [] for k in range(n): filter_sizes.append(filter_size_min +", "Image, ImageFilter class RandomBetaMorphology: def __init__( self, filter_size_min: int, filter_size_max:", "filter_probs = [] for k in range(n): filter_sizes.append(filter_size_min + 2", "beta ) @staticmethod def _create_filter_distribution( filter_size_min: int, filter_size_max: int, alpha:", "parser = argparse.ArgumentParser() parser.add_argument(\"--operation\", choices=(\"dilate\", \"erode\"), default=\"dilate\") parser.add_argument(\"images\", type=argparse.FileType(\"rb\"), nargs=\"+\")", "-> None: assert filter_size_min % 2 != 0, \"Filter size", "PIL import ImageOps parser = argparse.ArgumentParser() parser.add_argument(\"--operation\", choices=(\"dilate\", \"erode\"), default=\"dilate\")", "self, filter_size_min: int, filter_size_max: int, alpha: float, beta: float )", "\"__main__\": import argparse from PIL import ImageOps parser = argparse.ArgumentParser()", "= Dilate() if args.operation == \"dilate\" else Erode() for f", "2: return [filter_size_min], np.asarray([1.0], dtype=np.float32) filter_sizes = [] filter_probs =", "filter_probs / np_filter_probs.sum() return filter_sizes, np_filter_probs def sample_filter_size(self): filter_size =", "2 != 0, \"Filter size must be odd\" self.filter_size_min =", "__call__(self, img: Image) -> Image: filter_size = self.sample_filter_size() return 
img.filter(ImageFilter.MaxFilter(filter_size))", "x = ImageOps.invert(x) y = transformer(x) w, h = x.size", "img: Image) -> Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MaxFilter(filter_size)) class", "= self.sample_filter_size() return img.filter(ImageFilter.MaxFilter(filter_size)) class Erode(RandomBetaMorphology): def __init__( self, filter_size_min:", "= self._create_filter_distribution( filter_size_min, filter_size_max, alpha, beta ) @staticmethod def _create_filter_distribution(", "< 2: return [filter_size_min], np.asarray([1.0], dtype=np.float32) filter_sizes = [] filter_probs", "float = 3, ) -> None: super().__init__(filter_size_min, filter_size_max, alpha, beta)", "odd\" self.filter_size_min = filter_size_min self.filter_size_max = filter_size_max self.alpha = alpha", "\"Filter size must be odd\" assert filter_size_max % 2 !=", "Dilate() if args.operation == \"dilate\" else Erode() for f in", "__call__(self, img: Image) -> Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MinFilter(filter_size))", "def __init__( self, filter_size_min: int = 3, filter_size_max: int =", "= parser.parse_args() transformer = Dilate() if args.operation == \"dilate\" else", "in args.images: x = Image.open(f, \"r\").convert(\"L\") x = ImageOps.invert(x) y", "= filter_size_max self.alpha = alpha self.beta = beta self.filter_sizes, self.filter_probs", "- k + beta) ) np_filter_probs = np.asarray(filter_probs, dtype=np.float32) np_filter_probs", "PIL import Image, ImageFilter class RandomBetaMorphology: def __init__( self, filter_size_min:", "dtype=np.float32) filter_sizes = [] filter_probs = [] for k in", "(w, 2 * h)) z.paste(x, (0, 0)) z.paste(y, (0, h))", "numpy as np import scipy.special from PIL import Image, ImageFilter", "import scipy.special from PIL import Image, ImageFilter class RandomBetaMorphology: def", "filter_size_max: int = 7, alpha: float = 1, beta: float", "= self.sample_filter_size() return 
img.filter(ImageFilter.MinFilter(filter_size)) if __name__ == \"__main__\": import argparse", "2 * h)) z.paste(x, (0, 0)) z.paste(y, (0, h)) z", "= alpha self.beta = beta self.filter_sizes, self.filter_probs = self._create_filter_distribution( filter_size_min,", "-> None: super().__init__(filter_size_min, filter_size_max, alpha, beta) def __call__(self, img: Image)", "self.filter_size_max = filter_size_max self.alpha = alpha self.beta = beta self.filter_sizes,", "in range(n): filter_sizes.append(filter_size_min + 2 * k) filter_probs.append( scipy.special.comb(n, k)", "beta={self.beta})\" ) class Dilate(RandomBetaMorphology): def __init__( self, filter_size_min: int =", "choices=(\"dilate\", \"erode\"), default=\"dilate\") parser.add_argument(\"images\", type=argparse.FileType(\"rb\"), nargs=\"+\") args = parser.parse_args() transformer", "self.sample_filter_size() return img.filter(ImageFilter.MaxFilter(filter_size)) class Erode(RandomBetaMorphology): def __init__( self, filter_size_min: int", "-> Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MaxFilter(filter_size)) class Erode(RandomBetaMorphology): def", "__init__( self, filter_size_min: int = 3, filter_size_max: int = 7,", "from PIL import ImageOps parser = argparse.ArgumentParser() parser.add_argument(\"--operation\", choices=(\"dilate\", \"erode\"),", "f in args.images: x = Image.open(f, \"r\").convert(\"L\") x = ImageOps.invert(x)", "0)) z.paste(y, (0, h)) z = z.resize(size=(w // 2, h),", "2 != 0, \"Filter size must be odd\" assert filter_size_max", "filter_size_min) // 2 + 1 if n < 2: return", "class RandomBetaMorphology: def __init__( self, filter_size_min: int, filter_size_max: int, alpha:", "assert filter_size_max % 2 != 0, \"Filter size must be", "alpha: float, beta: float ) -> None: assert filter_size_min %", "filter_sizes, np_filter_probs def sample_filter_size(self): filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs) return filter_size", "return 
[filter_size_min], np.asarray([1.0], dtype=np.float32) filter_sizes = [] filter_probs = []", "typing import List, Tuple, Union import numpy as np import", "Union[List[float], np.ndarray]]: n = (filter_size_max - filter_size_min) // 2 +", "def _create_filter_distribution( filter_size_min: int, filter_size_max: int, alpha: float, beta: float", "f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min}, \" f\"filter_size_max={self.filter_size_max}, \" f\"alpha={self.alpha}, beta={self.beta})\" ) class Dilate(RandomBetaMorphology):", "for f in args.images: x = Image.open(f, \"r\").convert(\"L\") x =", "x = Image.open(f, \"r\").convert(\"L\") x = ImageOps.invert(x) y = transformer(x)", "from PIL import Image, ImageFilter class RandomBetaMorphology: def __init__( self,", "z.paste(x, (0, 0)) z.paste(y, (0, h)) z = z.resize(size=(w //", "- filter_size_min) // 2 + 1 if n < 2:", "\"dilate\" else Erode() for f in args.images: x = Image.open(f,", "k) filter_probs.append( scipy.special.comb(n, k) * scipy.special.beta(alpha + k, n -", "3, filter_size_max: int = 5, alpha: float = 1, beta:", "-> str: return ( f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min}, \" f\"filter_size_max={self.filter_size_max}, \" f\"alpha={self.alpha},", "must be odd\" self.filter_size_min = filter_size_min self.filter_size_max = filter_size_max self.alpha", "float = 1, beta: float = 3, ) -> None:", "3, ) -> None: super().__init__(filter_size_min, filter_size_max, alpha, beta) def __call__(self,", "np_filter_probs = filter_probs / np_filter_probs.sum() return filter_sizes, np_filter_probs def sample_filter_size(self):", "= Image.new(\"L\", (w, 2 * h)) z.paste(x, (0, 0)) z.paste(y,", "filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs) return filter_size def __call__(self, *args, **kwargs):", "int = 5, alpha: float = 1, beta: float =", "filter_size = self.sample_filter_size() return img.filter(ImageFilter.MaxFilter(filter_size)) 
class Erode(RandomBetaMorphology): def __init__( self,", "List, Tuple, Union import numpy as np import scipy.special from", "h)) z.paste(x, (0, 0)) z.paste(y, (0, h)) z = z.resize(size=(w", "self._create_filter_distribution( filter_size_min, filter_size_max, alpha, beta ) @staticmethod def _create_filter_distribution( filter_size_min:", "def __init__( self, filter_size_min: int, filter_size_max: int, alpha: float, beta:", "np import scipy.special from PIL import Image, ImageFilter class RandomBetaMorphology:", "float, beta: float ) -> None: assert filter_size_min % 2", "self.alpha = alpha self.beta = beta self.filter_sizes, self.filter_probs = self._create_filter_distribution(", "np.asarray([1.0], dtype=np.float32) filter_sizes = [] filter_probs = [] for k", "n = (filter_size_max - filter_size_min) // 2 + 1 if", "+ k, n - k + beta) ) np_filter_probs =", "None: super().__init__(filter_size_min, filter_size_max, alpha, beta) def __call__(self, img: Image) ->", "h)) z = z.resize(size=(w // 2, h), resample=Image.BICUBIC) z.show() input()", "alpha: float, beta: float ) -> Tuple[List[int], Union[List[float], np.ndarray]]: n", "ImageOps.invert(x) y = transformer(x) w, h = x.size z =", "0, \"Filter size must be odd\" assert filter_size_max % 2", "ImageOps parser = argparse.ArgumentParser() parser.add_argument(\"--operation\", choices=(\"dilate\", \"erode\"), default=\"dilate\") parser.add_argument(\"images\", type=argparse.FileType(\"rb\"),", "import Image, ImageFilter class RandomBetaMorphology: def __init__( self, filter_size_min: int,", "\" f\"filter_size_max={self.filter_size_max}, \" f\"alpha={self.alpha}, beta={self.beta})\" ) class Dilate(RandomBetaMorphology): def __init__(", "import ImageOps parser = argparse.ArgumentParser() parser.add_argument(\"--operation\", choices=(\"dilate\", \"erode\"), default=\"dilate\") parser.add_argument(\"images\",", "= np.random.choice(self.filter_sizes, p=self.filter_probs) return filter_size def __call__(self, *args, **kwargs): 
return", "0, \"Filter size must be odd\" self.filter_size_min = filter_size_min self.filter_size_max", "\" f\"alpha={self.alpha}, beta={self.beta})\" ) class Dilate(RandomBetaMorphology): def __init__( self, filter_size_min:", "= x.size z = Image.new(\"L\", (w, 2 * h)) z.paste(x,", "Image) -> Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MinFilter(filter_size)) if __name__", "-> Tuple[List[int], Union[List[float], np.ndarray]]: n = (filter_size_max - filter_size_min) //", "k) * scipy.special.beta(alpha + k, n - k + beta)", "* k) filter_probs.append( scipy.special.comb(n, k) * scipy.special.beta(alpha + k, n", "p=self.filter_probs) return filter_size def __call__(self, *args, **kwargs): return NotImplementedError def", "Erode() for f in args.images: x = Image.open(f, \"r\").convert(\"L\") x", "filter_size_max: int, alpha: float, beta: float ) -> Tuple[List[int], Union[List[float],", "[filter_size_min], np.asarray([1.0], dtype=np.float32) filter_sizes = [] filter_probs = [] for", "int, filter_size_max: int, alpha: float, beta: float ) -> None:", "filter_size_max, alpha, beta ) @staticmethod def _create_filter_distribution( filter_size_min: int, filter_size_max:", "for k in range(n): filter_sizes.append(filter_size_min + 2 * k) filter_probs.append(", "int = 3, filter_size_max: int = 5, alpha: float =", "-> Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MinFilter(filter_size)) if __name__ ==", "= Image.open(f, \"r\").convert(\"L\") x = ImageOps.invert(x) y = transformer(x) w,", "return img.filter(ImageFilter.MaxFilter(filter_size)) class Erode(RandomBetaMorphology): def __init__( self, filter_size_min: int =", "import List, Tuple, Union import numpy as np import scipy.special", "% 2 != 0, \"Filter size must be odd\" self.filter_size_min", "Image: filter_size = self.sample_filter_size() return img.filter(ImageFilter.MaxFilter(filter_size)) class Erode(RandomBetaMorphology): def __init__(", 
"parser.add_argument(\"--operation\", choices=(\"dilate\", \"erode\"), default=\"dilate\") parser.add_argument(\"images\", type=argparse.FileType(\"rb\"), nargs=\"+\") args = parser.parse_args()", "alpha: float = 1, beta: float = 3, ) ->", "args.operation == \"dilate\" else Erode() for f in args.images: x", "+ 2 * k) filter_probs.append( scipy.special.comb(n, k) * scipy.special.beta(alpha +", "f\"alpha={self.alpha}, beta={self.beta})\" ) class Dilate(RandomBetaMorphology): def __init__( self, filter_size_min: int", "filter_size_min, filter_size_max, alpha, beta ) @staticmethod def _create_filter_distribution( filter_size_min: int,", "= np.asarray(filter_probs, dtype=np.float32) np_filter_probs = filter_probs / np_filter_probs.sum() return filter_sizes,", "**kwargs): return NotImplementedError def __repr__(self) -> str: return ( f\"vision.{self.__class__.__name__}(\"", "transformer(x) w, h = x.size z = Image.new(\"L\", (w, 2", "as np import scipy.special from PIL import Image, ImageFilter class", "= transformer(x) w, h = x.size z = Image.new(\"L\", (w,", "Image.open(f, \"r\").convert(\"L\") x = ImageOps.invert(x) y = transformer(x) w, h", "== \"dilate\" else Erode() for f in args.images: x =", "Image.new(\"L\", (w, 2 * h)) z.paste(x, (0, 0)) z.paste(y, (0,", "filter_sizes.append(filter_size_min + 2 * k) filter_probs.append( scipy.special.comb(n, k) * scipy.special.beta(alpha", "\"r\").convert(\"L\") x = ImageOps.invert(x) y = transformer(x) w, h =", "= 3, filter_size_max: int = 5, alpha: float = 1,", "[] for k in range(n): filter_sizes.append(filter_size_min + 2 * k)", "beta self.filter_sizes, self.filter_probs = self._create_filter_distribution( filter_size_min, filter_size_max, alpha, beta )", "Tuple[List[int], Union[List[float], np.ndarray]]: n = (filter_size_max - filter_size_min) // 2", "beta) ) np_filter_probs = np.asarray(filter_probs, dtype=np.float32) np_filter_probs = filter_probs /", "int, alpha: float, beta: float ) -> None: assert filter_size_min", 
"return filter_size def __call__(self, *args, **kwargs): return NotImplementedError def __repr__(self)", "str: return ( f\"vision.{self.__class__.__name__}(\" f\"filter_size_min={self.filter_size_min}, \" f\"filter_size_max={self.filter_size_max}, \" f\"alpha={self.alpha}, beta={self.beta})\"", "beta) def __call__(self, img: Image) -> Image: filter_size = self.sample_filter_size()", "size must be odd\" self.filter_size_min = filter_size_min self.filter_size_max = filter_size_max", "from typing import List, Tuple, Union import numpy as np" ]
[ "sys.stdout.write('[GIT] ' + line + '\\n') sys.stdout.flush() class SimpleProgressPrinter(RemoteProgress): def", "error('[Hemp] ' + message, func, exception, stdout, stderr) def print_info(text,", "show_prefix, end, flush) def print_git_output(stdout): for line in stdout.split('\\n'): sys.stdout.write('[GIT]", "<gh_stars>1-10 import sys from fabric.utils import error, puts from git", "stdout=None, stderr=None): error('[Hemp] ' + message, func, exception, stdout, stderr)", "+ line + '\\n') sys.stdout.flush() class SimpleProgressPrinter(RemoteProgress): def _parse_progress_line(self, line):", "fabric.utils import error, puts from git import RemoteProgress def print_err(message,", "flush) def print_git_output(stdout): for line in stdout.split('\\n'): sys.stdout.write('[GIT] ' +", "def _parse_progress_line(self, line): if '\\r' in line: line = line.replace('\\r',", "line.replace('\\r', '\\r[GIT] ') sys.stdout.write('[GIT] ' + line + '\\n') sys.stdout.flush()", "end=\"\\n\", flush=True): puts('[Hemp] ' + text, show_prefix, end, flush) def", "puts from git import RemoteProgress def print_err(message, func=None, exception=None, stdout=None,", "= line.replace('\\r', '\\r[GIT] ') sys.stdout.write('[GIT] ' + line + '\\n')", "import sys from fabric.utils import error, puts from git import", "+ message, func, exception, stdout, stderr) def print_info(text, show_prefix=None, end=\"\\n\",", "git import RemoteProgress def print_err(message, func=None, exception=None, stdout=None, stderr=None): error('[Hemp]", "line: line = line.replace('\\r', '\\r[GIT] ') sys.stdout.write('[GIT] ' + line", "def print_err(message, func=None, exception=None, stdout=None, stderr=None): error('[Hemp] ' + message,", "exception=None, stdout=None, stderr=None): error('[Hemp] ' + message, func, exception, stdout,", "print_git_output(stdout): for line in stdout.split('\\n'): sys.stdout.write('[GIT] ' + line +", "line in stdout.split('\\n'): sys.stdout.write('[GIT] ' + line + '\\n') sys.stdout.flush()", 
"flush=True): puts('[Hemp] ' + text, show_prefix, end, flush) def print_git_output(stdout):", "func, exception, stdout, stderr) def print_info(text, show_prefix=None, end=\"\\n\", flush=True): puts('[Hemp]", "'\\r' in line: line = line.replace('\\r', '\\r[GIT] ') sys.stdout.write('[GIT] '", "puts('[Hemp] ' + text, show_prefix, end, flush) def print_git_output(stdout): for", "from fabric.utils import error, puts from git import RemoteProgress def", "in stdout.split('\\n'): sys.stdout.write('[GIT] ' + line + '\\n') sys.stdout.flush() class", "SimpleProgressPrinter(RemoteProgress): def _parse_progress_line(self, line): if '\\r' in line: line =", "print_err(message, func=None, exception=None, stdout=None, stderr=None): error('[Hemp] ' + message, func,", "RemoteProgress def print_err(message, func=None, exception=None, stdout=None, stderr=None): error('[Hemp] ' +", "exception, stdout, stderr) def print_info(text, show_prefix=None, end=\"\\n\", flush=True): puts('[Hemp] '", "stderr) def print_info(text, show_prefix=None, end=\"\\n\", flush=True): puts('[Hemp] ' + text,", "for line in stdout.split('\\n'): sys.stdout.write('[GIT] ' + line + '\\n')", "'\\n') sys.stdout.flush() class SimpleProgressPrinter(RemoteProgress): def _parse_progress_line(self, line): if '\\r' in", "stdout, stderr) def print_info(text, show_prefix=None, end=\"\\n\", flush=True): puts('[Hemp] ' +", "+ text, show_prefix, end, flush) def print_git_output(stdout): for line in", "line = line.replace('\\r', '\\r[GIT] ') sys.stdout.write('[GIT] ' + line +", "def print_info(text, show_prefix=None, end=\"\\n\", flush=True): puts('[Hemp] ' + text, show_prefix,", "sys.stdout.flush() class SimpleProgressPrinter(RemoteProgress): def _parse_progress_line(self, line): if '\\r' in line:", "class SimpleProgressPrinter(RemoteProgress): def _parse_progress_line(self, line): if '\\r' in line: line", "in line: line = line.replace('\\r', '\\r[GIT] ') sys.stdout.write('[GIT] ' +", "' + line + '\\n') sys.stdout.flush() 
class SimpleProgressPrinter(RemoteProgress): def _parse_progress_line(self,", "' + message, func, exception, stdout, stderr) def print_info(text, show_prefix=None,", "line + '\\n') sys.stdout.flush() class SimpleProgressPrinter(RemoteProgress): def _parse_progress_line(self, line): if", "stderr=None): error('[Hemp] ' + message, func, exception, stdout, stderr) def", "+ '\\n') sys.stdout.flush() class SimpleProgressPrinter(RemoteProgress): def _parse_progress_line(self, line): if '\\r'", "end, flush) def print_git_output(stdout): for line in stdout.split('\\n'): sys.stdout.write('[GIT] '", "import error, puts from git import RemoteProgress def print_err(message, func=None,", "' + text, show_prefix, end, flush) def print_git_output(stdout): for line", "stdout.split('\\n'): sys.stdout.write('[GIT] ' + line + '\\n') sys.stdout.flush() class SimpleProgressPrinter(RemoteProgress):", "_parse_progress_line(self, line): if '\\r' in line: line = line.replace('\\r', '\\r[GIT]", "if '\\r' in line: line = line.replace('\\r', '\\r[GIT] ') sys.stdout.write('[GIT]", "error, puts from git import RemoteProgress def print_err(message, func=None, exception=None,", "import RemoteProgress def print_err(message, func=None, exception=None, stdout=None, stderr=None): error('[Hemp] '", "def print_git_output(stdout): for line in stdout.split('\\n'): sys.stdout.write('[GIT] ' + line", "line): if '\\r' in line: line = line.replace('\\r', '\\r[GIT] ')", "message, func, exception, stdout, stderr) def print_info(text, show_prefix=None, end=\"\\n\", flush=True):", "func=None, exception=None, stdout=None, stderr=None): error('[Hemp] ' + message, func, exception,", "from git import RemoteProgress def print_err(message, func=None, exception=None, stdout=None, stderr=None):", "show_prefix=None, end=\"\\n\", flush=True): puts('[Hemp] ' + text, show_prefix, end, flush)", "print_info(text, show_prefix=None, end=\"\\n\", flush=True): puts('[Hemp] ' + text, show_prefix, end,", "sys from fabric.utils import 
error, puts from git import RemoteProgress", "text, show_prefix, end, flush) def print_git_output(stdout): for line in stdout.split('\\n'):" ]
[ "<reponame>dla1635/hyLink<gh_stars>1-10 # -*- coding: utf-8 -*- from collections import Counter", "= text.strip() self.tokens = self.okt.phrases(self.text) self.bow = Counter(self.tokens) def __str__(self):", "class Sentence(object): okt = Okt() def __init__(self, text, index=0): self.index", "Okt() def __init__(self, text, index=0): self.index = index self.text =", "def __init__(self, text, index=0): self.index = index self.text = text.strip()", "from collections import Counter from konlpy.tag import Okt class Sentence(object):", "utf-8 -*- from collections import Counter from konlpy.tag import Okt", "text, index=0): self.index = index self.text = text.strip() self.tokens =", "from konlpy.tag import Okt class Sentence(object): okt = Okt() def", "= self.okt.phrases(self.text) self.bow = Counter(self.tokens) def __str__(self): return self.text def", "text.strip() self.tokens = self.okt.phrases(self.text) self.bow = Counter(self.tokens) def __str__(self): return", "Counter from konlpy.tag import Okt class Sentence(object): okt = Okt()", "import Okt class Sentence(object): okt = Okt() def __init__(self, text,", "Sentence(object): okt = Okt() def __init__(self, text, index=0): self.index =", "Okt class Sentence(object): okt = Okt() def __init__(self, text, index=0):", "collections import Counter from konlpy.tag import Okt class Sentence(object): okt", "-*- from collections import Counter from konlpy.tag import Okt class", "= Okt() def __init__(self, text, index=0): self.index = index self.text", "index self.text = text.strip() self.tokens = self.okt.phrases(self.text) self.bow = Counter(self.tokens)", "self.text = text.strip() self.tokens = self.okt.phrases(self.text) self.bow = Counter(self.tokens) def", "__init__(self, text, index=0): self.index = index self.text = text.strip() self.tokens", "= Counter(self.tokens) def __str__(self): return self.text def __hash__(self): return self.index", "konlpy.tag import Okt class Sentence(object): okt = Okt() def 
__init__(self,", "okt = Okt() def __init__(self, text, index=0): self.index = index", "index=0): self.index = index self.text = text.strip() self.tokens = self.okt.phrases(self.text)", "# -*- coding: utf-8 -*- from collections import Counter from", "self.index = index self.text = text.strip() self.tokens = self.okt.phrases(self.text) self.bow", "coding: utf-8 -*- from collections import Counter from konlpy.tag import", "self.tokens = self.okt.phrases(self.text) self.bow = Counter(self.tokens) def __str__(self): return self.text", "= index self.text = text.strip() self.tokens = self.okt.phrases(self.text) self.bow =", "-*- coding: utf-8 -*- from collections import Counter from konlpy.tag", "import Counter from konlpy.tag import Okt class Sentence(object): okt =", "self.okt.phrases(self.text) self.bow = Counter(self.tokens) def __str__(self): return self.text def __hash__(self):", "self.bow = Counter(self.tokens) def __str__(self): return self.text def __hash__(self): return" ]
[ "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR", "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "must reproduce the above copyright notice, # this list of", "above copyright notice, # this list of conditions and the", "self._parser.parse(\"nop\") self.assertEqual(str(asm), \"nop\") # Misc # ======================================================================== # def test_misc_1(self):", "rights reserved. # Redistribution and use in source and binary", "ptr [ebx+edx*4+0x10]\") def test_zero_oprnd(self): asm = self._parser.parse(\"nop\") self.assertEqual(str(asm), \"nop\") #", "test_misc_1(self): asm = self._parser.parse(\"mov dword ptr [-0x21524111], ecx\") self.assertEqual(str(asm), \"mov", "NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE", "EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "asm = self._parser.parse(\"fucompi st(1)\") self.assertEqual(str(asm), \"fucompi st1\") class X86Parser64BitsTests(unittest.TestCase): def", "the following conditions are met: # 1. Redistributions of source", "rax, [rbx+r15*4+0x10]\") # Misc # ======================================================================== # def test_misc_offset_1(self): asm", "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR", "self.assertNotEqual(str(asm), \"mov dword ptr [0xdeadbeef], ecx\") def test_misc_2(self): asm =", "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT", "OF THE POSSIBILITY OF SUCH DAMAGE. from __future__ import absolute_import", "GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS;", "asm = self._parser.parse(\"jmp 0x12345678\") self.assertEqual(str(asm), \"jmp 0x12345678\") def test_one_oprnd_mem(self): asm", "A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO EVENT SHALL", "notice, this # list of conditions and the following disclaimer.", "# def test_misc_1(self): asm = self._parser.parse(\"mov dword ptr [-0x21524111], ecx\")", "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "copyright notice, this # list of conditions and the following", "self._parser = X86Parser(ARCH_X86_MODE_32) def test_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add eax, ebx\")", "import absolute_import import unittest from barf.arch import ARCH_X86_MODE_32 from barf.arch", "source and binary forms, with or without # modification, are", "def test_zero_oprnd(self): asm = self._parser.parse(\"nop\") self.assertEqual(str(asm), \"nop\") # Misc #", "PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE", "test_64_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add rax, [rbx + r15 * 4", "ANY WAY OUT OF THE USE # OF THIS SOFTWARE,", "dword ptr [-0x21524111], ecx\") self.assertEqual(str(asm), \"mov dword ptr [-0x21524111], ecx\")", "self._parser.parse(\"add [ebx + edx * 4 + 0x10], eax\") self.assertEqual(str(asm),", "0x12345678\") def test_one_oprnd_mem(self): asm = self._parser.parse(\"inc dword ptr [ebx+edx*4+0x10]\") self.assertEqual(str(asm),", "of source code must retain the above copyright notice, this", "SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR", "barf.arch.x86.parser import X86Parser class X86Parser32BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_32)", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED.", "= self._parser.parse(\"add rax, rbx\") self.assertEqual(str(asm), \"add rax, rbx\") def test_64_two_oprnd_reg_reg_2(self):", "OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR", "AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR IMPLIED", "ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, #", "= self._parser.parse(\"add eax, 0x12345678\") self.assertEqual(str(asm), \"add eax, 0x12345678\") def test_two_oprnd_reg_mem(self):", 
"disclaimer. # 2. Redistributions in binary form must reproduce the", "test_one_oprnd_imm(self): asm = self._parser.parse(\"jmp 0x12345678\") self.assertEqual(str(asm), \"jmp 0x12345678\") def test_one_oprnd_mem(self):", "provided that the following conditions are met: # 1. Redistributions", "= self._parser.parse(\"add rax, r8\") self.assertEqual(str(asm), \"add rax, r8\") def test_64_two_oprnd_reg_mem(self):", "and binary forms, with or without # modification, are permitted", "\"jmp 0x12345678\") def test_one_oprnd_mem(self): asm = self._parser.parse(\"inc dword ptr [ebx+edx*4+0x10]\")", "USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE", "BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,", "source code must retain the above copyright notice, this #", "def test_misc_1(self): asm = self._parser.parse(\"mov dword ptr [-0x21524111], ecx\") self.assertEqual(str(asm),", "OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON", "eax, [ebx + edx * 4 + 0x10]\") self.assertEqual(str(asm), \"add", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE", "(c) 2014, Fundacion Dr. <NAME> # All rights reserved. 
#", "OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)", "eax, 0x12345678\") def test_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add eax, [ebx +", "= self._parser.parse(\"nop\") self.assertEqual(str(asm), \"nop\") # Misc # ======================================================================== # def", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE", "WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE", "self._parser.parse(\"add rax, r8\") self.assertEqual(str(asm), \"add rax, r8\") def test_64_two_oprnd_reg_mem(self): asm", "self.assertEqual(str(asm), \"add rax, r8\") def test_64_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add rax,", "test_misc_2(self): asm = self._parser.parse(\"fucompi st(1)\") self.assertEqual(str(asm), \"fucompi st1\") class X86Parser64BitsTests(unittest.TestCase):", "+ 0x10]\") self.assertEqual(str(asm), \"add rax, [rbx+r15*4+0x10]\") # Misc # ========================================================================", "notice, # this list of conditions and the following disclaimer", "NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES;", "= self._parser.parse(\"add byte ptr [rax+0xffffff89], cl\") self.assertEqual(str(asm), \"add byte ptr", "self._parser.parse(\"add eax, 0x12345678\") self.assertEqual(str(asm), \"add eax, 0x12345678\") def test_two_oprnd_reg_mem(self): asm", "retain the above copyright notice, this # list of conditions", "OUT OF THE USE # OF THIS SOFTWARE, EVEN IF", "+ r15 * 4 + 0x10]\") self.assertEqual(str(asm), \"add rax, [rbx+r15*4+0x10]\")", "OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY", "ebx\") def test_two_oprnd_reg_imm(self): asm = self._parser.parse(\"add eax, 0x12345678\") self.assertEqual(str(asm), \"add", "self.assertEqual(str(asm), \"add eax, 0x12345678\") def test_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add eax,", "CONTRIBUTORS BE 
LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "eax, ebx\") self.assertEqual(str(asm), \"add eax, ebx\") def test_two_oprnd_reg_imm(self): asm =", "eax, 0x12345678\") self.assertEqual(str(asm), \"add eax, 0x12345678\") def test_two_oprnd_reg_mem(self): asm =", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "disclaimer in the documentation # and/or other materials provided with", "test_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add eax, ebx\") self.assertEqual(str(asm), \"add eax, ebx\")", "\"AS IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "[-0x21524111], ecx\") self.assertNotEqual(str(asm), \"mov dword ptr [0xdeadbeef], ecx\") def test_misc_2(self):", "CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES,", "+ edx * 4 + 0x10], eax\") self.assertEqual(str(asm), \"add [ebx+edx*4+0x10],", "\"add eax, 0x12345678\") def test_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add eax, [ebx", "4 + 0x10], eax\") self.assertEqual(str(asm), \"add [ebx+edx*4+0x10], eax\") def test_one_oprnd_reg(self):", "Fundacion Dr. <NAME> # All rights reserved. # Redistribution and", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from", "SUCH DAMAGE. 
from __future__ import absolute_import import unittest from barf.arch", "import unittest from barf.arch import ARCH_X86_MODE_32 from barf.arch import ARCH_X86_MODE_64", "# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "rbx\") def test_64_two_oprnd_reg_reg_2(self): asm = self._parser.parse(\"add rax, r8\") self.assertEqual(str(asm), \"add", "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "self._parser.parse(\"jmp 0x12345678\") self.assertEqual(str(asm), \"jmp 0x12345678\") def test_one_oprnd_mem(self): asm = self._parser.parse(\"inc", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS", "self.assertEqual(str(asm), \"add eax, ebx\") def test_two_oprnd_reg_imm(self): asm = self._parser.parse(\"add eax,", "CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)", "Redistributions of source code must retain the above copyright notice,", "rbx\") self.assertEqual(str(asm), \"add rax, rbx\") def test_64_two_oprnd_reg_reg_2(self): asm = self._parser.parse(\"add", "of conditions and the following disclaimer. # 2. Redistributions in", "permitted provided that the following conditions are met: # 1.", "OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from __future__ import", "def test_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add eax, [ebx + edx *", "ecx\") self.assertEqual(str(asm), \"mov dword ptr [-0x21524111], ecx\") self.assertNotEqual(str(asm), \"mov dword", "documentation # and/or other materials provided with the distribution. 
#", "X86Parser class X86Parser32BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_32) def test_two_oprnd_reg_reg(self):", "self.assertEqual(str(asm), \"nop\") # Misc # ======================================================================== # def test_misc_1(self): asm", "r8\") self.assertEqual(str(asm), \"add rax, r8\") def test_64_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add", "IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE", "or without # modification, are permitted provided that the following", "OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT", "def test_two_oprnd_reg_imm(self): asm = self._parser.parse(\"add eax, 0x12345678\") self.assertEqual(str(asm), \"add eax,", "Redistribution and use in source and binary forms, with or", "code must retain the above copyright notice, this # list", "# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "\"add eax, [ebx+edx*4+0x10]\") def test_two_oprnd_mem_reg(self): asm = self._parser.parse(\"add [ebx +", "asm = self._parser.parse(\"nop\") self.assertEqual(str(asm), \"nop\") # Misc # ======================================================================== #", "\"add byte ptr [rax+0xffffff89], cl\") def main(): unittest.main() if __name__", "that the following conditions are met: # 1. Redistributions of", "HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT,", "this # list of conditions and the following disclaimer. #", "= self._parser.parse(\"inc eax\") self.assertEqual(str(asm), \"inc eax\") def test_one_oprnd_imm(self): asm =", "======================================================================== # def test_misc_offset_1(self): asm = self._parser.parse(\"add byte ptr [rax+0xffffff89],", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from __future__", "asm = self._parser.parse(\"add rax, [rbx + r15 * 4 +", "def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_64) def test_64_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add", "# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING,", "barf.arch import ARCH_X86_MODE_64 from barf.arch.x86.parser import X86Parser class X86Parser32BitsTests(unittest.TestCase): def", "eax\") self.assertEqual(str(asm), \"add [ebx+edx*4+0x10], eax\") def test_one_oprnd_reg(self): asm = self._parser.parse(\"inc", "eax, [ebx+edx*4+0x10]\") def test_two_oprnd_mem_reg(self): asm = self._parser.parse(\"add [ebx + edx", "binary form must reproduce the above copyright notice, # this", "self.assertEqual(str(asm), \"mov dword ptr [-0x21524111], ecx\") self.assertNotEqual(str(asm), \"mov dword ptr", "ptr [-0x21524111], ecx\") self.assertEqual(str(asm), \"mov dword ptr [-0x21524111], ecx\") self.assertNotEqual(str(asm),", "POSSIBILITY OF SUCH DAMAGE. from __future__ import absolute_import import unittest", "* 4 + 0x10], eax\") self.assertEqual(str(asm), \"add [ebx+edx*4+0x10], eax\") def", "4 + 0x10]\") self.assertEqual(str(asm), \"add rax, [rbx+r15*4+0x10]\") # Misc #", "byte ptr [rax+0xffffff89], cl\") self.assertEqual(str(asm), \"add byte ptr [rax+0xffffff89], cl\")", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" #", "= self._parser.parse(\"inc dword ptr [ebx+edx*4+0x10]\") self.assertEqual(str(asm), \"inc dword ptr [ebx+edx*4+0x10]\")", "\"mov dword ptr [0xdeadbeef], ecx\") def test_misc_2(self): asm = self._parser.parse(\"fucompi", "= self._parser.parse(\"mov dword ptr [-0x21524111], ecx\") self.assertEqual(str(asm), \"mov dword ptr", "All rights reserved. 
# Redistribution and use in source and", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL #", "BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF", "= X86Parser(ARCH_X86_MODE_32) def test_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add eax, ebx\") self.assertEqual(str(asm),", "in source and binary forms, with or without # modification,", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING", "EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE #", "in the documentation # and/or other materials provided with the", "reserved. # Redistribution and use in source and binary forms,", "of conditions and the following disclaimer in the documentation #", "0x12345678\") self.assertEqual(str(asm), \"jmp 0x12345678\") def test_one_oprnd_mem(self): asm = self._parser.parse(\"inc dword", "__future__ import absolute_import import unittest from barf.arch import ARCH_X86_MODE_32 from", "ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER", "test_misc_offset_1(self): asm = self._parser.parse(\"add byte ptr [rax+0xffffff89], cl\") self.assertEqual(str(asm), \"add", "are met: # 1. 
Redistributions of source code must retain", "def test_64_two_oprnd_reg_reg_2(self): asm = self._parser.parse(\"add rax, r8\") self.assertEqual(str(asm), \"add rax,", "self._parser = X86Parser(ARCH_X86_MODE_64) def test_64_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add rax, rbx\")", "self._parser.parse(\"inc eax\") self.assertEqual(str(asm), \"inc eax\") def test_one_oprnd_imm(self): asm = self._parser.parse(\"jmp", "[-0x21524111], ecx\") self.assertEqual(str(asm), \"mov dword ptr [-0x21524111], ecx\") self.assertNotEqual(str(asm), \"mov", "\"mov dword ptr [-0x21524111], ecx\") self.assertNotEqual(str(asm), \"mov dword ptr [0xdeadbeef],", "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND", "form must reproduce the above copyright notice, # this list", "ptr [rax+0xffffff89], cl\") self.assertEqual(str(asm), \"add byte ptr [rax+0xffffff89], cl\") def", "# 1. Redistributions of source code must retain the above", "use in source and binary forms, with or without #", "LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR", "<NAME> # All rights reserved. 
# Redistribution and use in", "CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,", "barf.arch import ARCH_X86_MODE_32 from barf.arch import ARCH_X86_MODE_64 from barf.arch.x86.parser import", "def test_one_oprnd_mem(self): asm = self._parser.parse(\"inc dword ptr [ebx+edx*4+0x10]\") self.assertEqual(str(asm), \"inc", "asm = self._parser.parse(\"add rax, rbx\") self.assertEqual(str(asm), \"add rax, rbx\") def", "WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN", "edx * 4 + 0x10], eax\") self.assertEqual(str(asm), \"add [ebx+edx*4+0x10], eax\")", "def test_64_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add rax, rbx\") self.assertEqual(str(asm), \"add rax,", "X86Parser32BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_32) def test_two_oprnd_reg_reg(self): asm =", "ecx\") def test_misc_2(self): asm = self._parser.parse(\"fucompi st(1)\") self.assertEqual(str(asm), \"fucompi st1\")", "other materials provided with the distribution. # THIS SOFTWARE IS", "OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED", "COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT,", "from barf.arch import ARCH_X86_MODE_64 from barf.arch.x86.parser import X86Parser class X86Parser32BitsTests(unittest.TestCase):", "test_two_oprnd_reg_imm(self): asm = self._parser.parse(\"add eax, 0x12345678\") self.assertEqual(str(asm), \"add eax, 0x12345678\")", "= X86Parser(ARCH_X86_MODE_64) def test_64_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add rax, rbx\") self.assertEqual(str(asm),", "* 4 + 0x10]\") self.assertEqual(str(asm), \"add rax, [rbx+r15*4+0x10]\") # Misc", "dword ptr [ebx+edx*4+0x10]\") self.assertEqual(str(asm), \"inc dword ptr [ebx+edx*4+0x10]\") def test_zero_oprnd(self):", "byte ptr [rax+0xffffff89], cl\") def main(): unittest.main() if __name__ ==", "with or without # modification, are permitted provided that the", "test_one_oprnd_reg(self): asm = self._parser.parse(\"inc eax\") self.assertEqual(str(asm), \"inc eax\") def 
test_one_oprnd_imm(self):", "[rax+0xffffff89], cl\") self.assertEqual(str(asm), \"add byte ptr [rax+0xffffff89], cl\") def main():", "following conditions are met: # 1. Redistributions of source code", "self.assertEqual(str(asm), \"add byte ptr [rax+0xffffff89], cl\") def main(): unittest.main() if", "asm = self._parser.parse(\"add eax, 0x12345678\") self.assertEqual(str(asm), \"add eax, 0x12345678\") def", "= self._parser.parse(\"add [ebx + edx * 4 + 0x10], eax\")", "ARCH_X86_MODE_64 from barf.arch.x86.parser import X86Parser class X86Parser32BitsTests(unittest.TestCase): def setUp(self): self._parser", "def test_one_oprnd_reg(self): asm = self._parser.parse(\"inc eax\") self.assertEqual(str(asm), \"inc eax\") def", "self._parser.parse(\"mov dword ptr [-0x21524111], ecx\") self.assertEqual(str(asm), \"mov dword ptr [-0x21524111],", "# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "OF SUCH DAMAGE. from __future__ import absolute_import import unittest from", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES", "st(1)\") self.assertEqual(str(asm), \"fucompi st1\") class X86Parser64BitsTests(unittest.TestCase): def setUp(self): self._parser =", "# this list of conditions and the following disclaimer in", "[ebx + edx * 4 + 0x10]\") self.assertEqual(str(asm), \"add eax,", "IN ANY WAY OUT OF THE USE # OF THIS", "[ebx+edx*4+0x10], eax\") def test_one_oprnd_reg(self): asm = self._parser.parse(\"inc eax\") self.assertEqual(str(asm), \"inc", "# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL", "must retain the above copyright notice, this # list of", "# ======================================================================== # def test_misc_offset_1(self): asm = self._parser.parse(\"add byte ptr", "+ 0x10]\") self.assertEqual(str(asm), \"add eax, [ebx+edx*4+0x10]\") def test_two_oprnd_mem_reg(self): asm =", "# Misc # ======================================================================== # def test_misc_offset_1(self): 
asm = self._parser.parse(\"add", "Dr. <NAME> # All rights reserved. # Redistribution and use", "[ebx+edx*4+0x10]\") self.assertEqual(str(asm), \"inc dword ptr [ebx+edx*4+0x10]\") def test_zero_oprnd(self): asm =", "X86Parser64BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_64) def test_64_two_oprnd_reg_reg(self): asm =", "self.assertEqual(str(asm), \"fucompi st1\") class X86Parser64BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_64)", "FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT", "self._parser.parse(\"add eax, [ebx + edx * 4 + 0x10]\") self.assertEqual(str(asm),", "the following disclaimer in the documentation # and/or other materials", "HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR", "and use in source and binary forms, with or without", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR", "THE POSSIBILITY OF SUCH DAMAGE. from __future__ import absolute_import import", "dword ptr [ebx+edx*4+0x10]\") def test_zero_oprnd(self): asm = self._parser.parse(\"nop\") self.assertEqual(str(asm), \"nop\")", "2014, Fundacion Dr. <NAME> # All rights reserved. # Redistribution", "PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE,", "+ 0x10], eax\") self.assertEqual(str(asm), \"add [ebx+edx*4+0x10], eax\") def test_one_oprnd_reg(self): asm", "conditions and the following disclaimer in the documentation # and/or", "# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS", "reproduce the above copyright notice, # this list of conditions", "AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT", "in binary form must reproduce the above copyright notice, #", "# All rights reserved. 
# Redistribution and use in source", "forms, with or without # modification, are permitted provided that", "ARISING IN ANY WAY OUT OF THE USE # OF", "binary forms, with or without # modification, are permitted provided", "# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN", "edx * 4 + 0x10]\") self.assertEqual(str(asm), \"add eax, [ebx+edx*4+0x10]\") def", "# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR", "\"inc eax\") def test_one_oprnd_imm(self): asm = self._parser.parse(\"jmp 0x12345678\") self.assertEqual(str(asm), \"jmp", "# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE", "# ======================================================================== # def test_misc_1(self): asm = self._parser.parse(\"mov dword ptr", "[rbx + r15 * 4 + 0x10]\") self.assertEqual(str(asm), \"add rax,", "LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN", "[0xdeadbeef], ecx\") def test_misc_2(self): asm = self._parser.parse(\"fucompi st(1)\") self.assertEqual(str(asm), \"fucompi", "with the distribution. # THIS SOFTWARE IS PROVIDED BY THE", "TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF", "= self._parser.parse(\"add eax, [ebx + edx * 4 + 0x10]\")", "the documentation # and/or other materials provided with the distribution.", "materials provided with the distribution. 
# THIS SOFTWARE IS PROVIDED", "test_zero_oprnd(self): asm = self._parser.parse(\"nop\") self.assertEqual(str(asm), \"nop\") # Misc # ========================================================================", "\"nop\") # Misc # ======================================================================== # def test_misc_1(self): asm =", "[ebx+edx*4+0x10]\") def test_two_oprnd_mem_reg(self): asm = self._parser.parse(\"add [ebx + edx *", "are permitted provided that the following conditions are met: #", "above copyright notice, this # list of conditions and the", "def test_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add eax, ebx\") self.assertEqual(str(asm), \"add eax,", "LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "rax, rbx\") self.assertEqual(str(asm), \"add rax, rbx\") def test_64_two_oprnd_reg_reg_2(self): asm =", "\"fucompi st1\") class X86Parser64BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_64) def", "self._parser.parse(\"fucompi st(1)\") self.assertEqual(str(asm), \"fucompi st1\") class X86Parser64BitsTests(unittest.TestCase): def setUp(self): self._parser", "provided with the distribution. # THIS SOFTWARE IS PROVIDED BY", "distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "asm = self._parser.parse(\"add byte ptr [rax+0xffffff89], cl\") self.assertEqual(str(asm), \"add byte", "Copyright (c) 2014, Fundacion Dr. <NAME> # All rights reserved.", "conditions are met: # 1. 
Redistributions of source code must", "4 + 0x10]\") self.assertEqual(str(asm), \"add eax, [ebx+edx*4+0x10]\") def test_two_oprnd_mem_reg(self): asm", "self._parser.parse(\"add byte ptr [rax+0xffffff89], cl\") self.assertEqual(str(asm), \"add byte ptr [rax+0xffffff89],", "Redistributions in binary form must reproduce the above copyright notice,", "test_64_two_oprnd_reg_reg_2(self): asm = self._parser.parse(\"add rax, r8\") self.assertEqual(str(asm), \"add rax, r8\")", "# Redistribution and use in source and binary forms, with", "ptr [-0x21524111], ecx\") self.assertNotEqual(str(asm), \"mov dword ptr [0xdeadbeef], ecx\") def", "AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN", "the above copyright notice, # this list of conditions and", "Misc # ======================================================================== # def test_misc_1(self): asm = self._parser.parse(\"mov dword", "HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER", "self._parser.parse(\"add rax, [rbx + r15 * 4 + 0x10]\") self.assertEqual(str(asm),", "import ARCH_X86_MODE_32 from barf.arch import ARCH_X86_MODE_64 from barf.arch.x86.parser import X86Parser", "list of conditions and the following disclaimer in the documentation", "eax\") def test_one_oprnd_imm(self): asm = self._parser.parse(\"jmp 0x12345678\") self.assertEqual(str(asm), \"jmp 0x12345678\")", "asm = self._parser.parse(\"add eax, [ebx + edx * 4 +", "def test_two_oprnd_mem_reg(self): asm = self._parser.parse(\"add [ebx + edx * 4", "# modification, are permitted provided that the following conditions are", "[rax+0xffffff89], cl\") def main(): unittest.main() if __name__ == '__main__': main()", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS", "following disclaimer in the documentation # and/or other materials provided", "FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO", "test_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add eax, [ebx + edx * 4", "self.assertEqual(str(asm), \"inc eax\") def test_one_oprnd_imm(self): asm = self._parser.parse(\"jmp 0x12345678\") self.assertEqual(str(asm),", "def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_32) def test_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add", "def test_misc_2(self): asm = self._parser.parse(\"fucompi st(1)\") self.assertEqual(str(asm), \"fucompi st1\") class", "self._parser.parse(\"add rax, rbx\") self.assertEqual(str(asm), \"add rax, rbx\") def test_64_two_oprnd_reg_reg_2(self): asm", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED", "0x10]\") self.assertEqual(str(asm), \"add eax, [ebx+edx*4+0x10]\") def test_two_oprnd_mem_reg(self): asm = self._parser.parse(\"add", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "class X86Parser32BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_32) def test_two_oprnd_reg_reg(self): asm", "asm = self._parser.parse(\"add eax, ebx\") self.assertEqual(str(asm), \"add eax, ebx\") def", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "copyright notice, # this list of conditions and the following", "OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA,", "\"add [ebx+edx*4+0x10], eax\") def test_one_oprnd_reg(self): asm = self._parser.parse(\"inc eax\") self.assertEqual(str(asm),", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES", "rax, r8\") self.assertEqual(str(asm), \"add rax, r8\") def test_64_two_oprnd_reg_mem(self): asm =", "INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF", "eax, ebx\") def test_two_oprnd_reg_imm(self): asm = self._parser.parse(\"add eax, 0x12345678\") self.assertEqual(str(asm),", "conditions and the following disclaimer. # 2. 
Redistributions in binary", "test_one_oprnd_mem(self): asm = self._parser.parse(\"inc dword ptr [ebx+edx*4+0x10]\") self.assertEqual(str(asm), \"inc dword", "OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "r8\") def test_64_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add rax, [rbx + r15", "and the following disclaimer. # 2. Redistributions in binary form", "the above copyright notice, this # list of conditions and", "and the following disclaimer in the documentation # and/or other", "self.assertEqual(str(asm), \"add eax, [ebx+edx*4+0x10]\") def test_two_oprnd_mem_reg(self): asm = self._parser.parse(\"add [ebx", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "= self._parser.parse(\"add eax, ebx\") self.assertEqual(str(asm), \"add eax, ebx\") def test_two_oprnd_reg_imm(self):", "= self._parser.parse(\"add rax, [rbx + r15 * 4 + 0x10]\")", "the following disclaimer. # 2. Redistributions in binary form must", "[ebx + edx * 4 + 0x10], eax\") self.assertEqual(str(asm), \"add", "following disclaimer. # 2. 
Redistributions in binary form must reproduce", "self._parser.parse(\"add eax, ebx\") self.assertEqual(str(asm), \"add eax, ebx\") def test_two_oprnd_reg_imm(self): asm", "asm = self._parser.parse(\"mov dword ptr [-0x21524111], ecx\") self.assertEqual(str(asm), \"mov dword", "= self._parser.parse(\"fucompi st(1)\") self.assertEqual(str(asm), \"fucompi st1\") class X86Parser64BitsTests(unittest.TestCase): def setUp(self):", "= self._parser.parse(\"jmp 0x12345678\") self.assertEqual(str(asm), \"jmp 0x12345678\") def test_one_oprnd_mem(self): asm =", "test_64_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add rax, rbx\") self.assertEqual(str(asm), \"add rax, rbx\")", "self.assertEqual(str(asm), \"add [ebx+edx*4+0x10], eax\") def test_one_oprnd_reg(self): asm = self._parser.parse(\"inc eax\")", "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS", "self.assertEqual(str(asm), \"add rax, [rbx+r15*4+0x10]\") # Misc # ======================================================================== # def", "LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "\"add rax, [rbx+r15*4+0x10]\") # Misc # ======================================================================== # def test_misc_offset_1(self):", "def test_64_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add rax, [rbx + r15 *", "eax\") self.assertEqual(str(asm), \"inc eax\") def test_one_oprnd_imm(self): asm = self._parser.parse(\"jmp 0x12345678\")", "without # modification, are permitted provided that the following conditions", "the distribution. 
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF", "Misc # ======================================================================== # def test_misc_offset_1(self): asm = self._parser.parse(\"add byte", "absolute_import import unittest from barf.arch import ARCH_X86_MODE_32 from barf.arch import", "rax, r8\") def test_64_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add rax, [rbx +", "# list of conditions and the following disclaimer. # 2.", "this list of conditions and the following disclaimer in the", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY", "modification, are permitted provided that the following conditions are met:", "IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR", "THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF", "ptr [0xdeadbeef], ecx\") def test_misc_2(self): asm = self._parser.parse(\"fucompi st(1)\") self.assertEqual(str(asm),", "rax, [rbx + r15 * 4 + 0x10]\") self.assertEqual(str(asm), \"add", "import X86Parser class X86Parser32BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_32) def", "# Misc # ======================================================================== # def test_misc_1(self): asm = self._parser.parse(\"mov", "# Copyright (c) 2014, Fundacion Dr. 
<NAME> # All rights", "ebx\") self.assertEqual(str(asm), \"add eax, ebx\") def test_two_oprnd_reg_imm(self): asm = self._parser.parse(\"add", "setUp(self): self._parser = X86Parser(ARCH_X86_MODE_64) def test_64_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add rax,", "asm = self._parser.parse(\"add rax, r8\") self.assertEqual(str(asm), \"add rax, r8\") def", "r15 * 4 + 0x10]\") self.assertEqual(str(asm), \"add rax, [rbx+r15*4+0x10]\") #", "dword ptr [-0x21524111], ecx\") self.assertNotEqual(str(asm), \"mov dword ptr [0xdeadbeef], ecx\")", "ptr [ebx+edx*4+0x10]\") self.assertEqual(str(asm), \"inc dword ptr [ebx+edx*4+0x10]\") def test_zero_oprnd(self): asm", "0x12345678\") def test_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add eax, [ebx + edx", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #", "======================================================================== # def test_misc_1(self): asm = self._parser.parse(\"mov dword ptr [-0x21524111],", "\"add rax, rbx\") def test_64_two_oprnd_reg_reg_2(self): asm = self._parser.parse(\"add rax, r8\")", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"", "list of conditions and the following disclaimer. # 2. Redistributions", "ARCH_X86_MODE_32 from barf.arch import ARCH_X86_MODE_64 from barf.arch.x86.parser import X86Parser class", "dword ptr [0xdeadbeef], ecx\") def test_misc_2(self): asm = self._parser.parse(\"fucompi st(1)\")", "0x10]\") self.assertEqual(str(asm), \"add rax, [rbx+r15*4+0x10]\") # Misc # ======================================================================== #", "X86Parser(ARCH_X86_MODE_32) def test_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add eax, ebx\") self.assertEqual(str(asm), \"add", "met: # 1. 
Redistributions of source code must retain the", "from __future__ import absolute_import import unittest from barf.arch import ARCH_X86_MODE_32", "asm = self._parser.parse(\"add [ebx + edx * 4 + 0x10],", "self.assertEqual(str(asm), \"inc dword ptr [ebx+edx*4+0x10]\") def test_zero_oprnd(self): asm = self._parser.parse(\"nop\")", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE #", "STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING", "ecx\") self.assertNotEqual(str(asm), \"mov dword ptr [0xdeadbeef], ecx\") def test_misc_2(self): asm", "def test_one_oprnd_imm(self): asm = self._parser.parse(\"jmp 0x12345678\") self.assertEqual(str(asm), \"jmp 0x12345678\") def", "test_two_oprnd_mem_reg(self): asm = self._parser.parse(\"add [ebx + edx * 4 +", "setUp(self): self._parser = X86Parser(ARCH_X86_MODE_32) def test_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add eax,", "NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND", "* 4 + 0x10]\") self.assertEqual(str(asm), \"add eax, [ebx+edx*4+0x10]\") def test_two_oprnd_mem_reg(self):", "[ebx+edx*4+0x10]\") def test_zero_oprnd(self): asm = self._parser.parse(\"nop\") self.assertEqual(str(asm), \"nop\") # Misc", "USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED", "BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY", "INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY,", "\"add rax, r8\") def test_64_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add rax, [rbx", "0x10], eax\") self.assertEqual(str(asm), \"add [ebx+edx*4+0x10], eax\") def test_one_oprnd_reg(self): asm =", "IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "import ARCH_X86_MODE_64 from barf.arch.x86.parser import X86Parser class X86Parser32BitsTests(unittest.TestCase): def setUp(self):", "INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT", "and/or other materials provided with the distribution. # THIS SOFTWARE", "1. 
Redistributions of source code must retain the above copyright", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE #", "def test_misc_offset_1(self): asm = self._parser.parse(\"add byte ptr [rax+0xffffff89], cl\") self.assertEqual(str(asm),", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "\"inc dword ptr [ebx+edx*4+0x10]\") def test_zero_oprnd(self): asm = self._parser.parse(\"nop\") self.assertEqual(str(asm),", "# def test_misc_offset_1(self): asm = self._parser.parse(\"add byte ptr [rax+0xffffff89], cl\")", "unittest from barf.arch import ARCH_X86_MODE_32 from barf.arch import ARCH_X86_MODE_64 from", "ptr [rax+0xffffff89], cl\") def main(): unittest.main() if __name__ == '__main__':", "DAMAGE. from __future__ import absolute_import import unittest from barf.arch import", "THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "self._parser.parse(\"inc dword ptr [ebx+edx*4+0x10]\") self.assertEqual(str(asm), \"inc dword ptr [ebx+edx*4+0x10]\") def", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS", "class X86Parser64BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_64) def test_64_two_oprnd_reg_reg(self): asm", "# and/or other materials provided with the distribution. # THIS", "eax\") def test_one_oprnd_reg(self): asm = self._parser.parse(\"inc eax\") self.assertEqual(str(asm), \"inc eax\")", "\"add eax, ebx\") def test_two_oprnd_reg_imm(self): asm = self._parser.parse(\"add eax, 0x12345678\")", "self.assertEqual(str(asm), \"jmp 0x12345678\") def test_one_oprnd_mem(self): asm = self._parser.parse(\"inc dword ptr", "TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "from barf.arch.x86.parser import X86Parser class X86Parser32BitsTests(unittest.TestCase): def setUp(self): self._parser =", "asm = self._parser.parse(\"inc eax\") self.assertEqual(str(asm), \"inc eax\") def test_one_oprnd_imm(self): asm", "2. 
Redistributions in binary form must reproduce the above copyright", "st1\") class X86Parser64BitsTests(unittest.TestCase): def setUp(self): self._parser = X86Parser(ARCH_X86_MODE_64) def test_64_two_oprnd_reg_reg(self):", "ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "self.assertEqual(str(asm), \"add rax, rbx\") def test_64_two_oprnd_reg_reg_2(self): asm = self._parser.parse(\"add rax,", "PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY", "cl\") self.assertEqual(str(asm), \"add byte ptr [rax+0xffffff89], cl\") def main(): unittest.main()", "OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER #", "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED", "+ edx * 4 + 0x10]\") self.assertEqual(str(asm), \"add eax, [ebx+edx*4+0x10]\")", "[rbx+r15*4+0x10]\") # Misc # ======================================================================== # def test_misc_offset_1(self): asm =", "BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #", "X86Parser(ARCH_X86_MODE_64) def test_64_two_oprnd_reg_reg(self): asm = self._parser.parse(\"add rax, rbx\") self.assertEqual(str(asm), \"add", "from barf.arch import ARCH_X86_MODE_32 from barf.arch import ARCH_X86_MODE_64 from barf.arch.x86.parser", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND", "asm = self._parser.parse(\"inc dword ptr [ebx+edx*4+0x10]\") self.assertEqual(str(asm), \"inc dword ptr", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY", "# 2. Redistributions in binary form must reproduce the above", "rax, rbx\") def test_64_two_oprnd_reg_reg_2(self): asm = self._parser.parse(\"add rax, r8\") self.assertEqual(str(asm),", "0x12345678\") self.assertEqual(str(asm), \"add eax, 0x12345678\") def test_two_oprnd_reg_mem(self): asm = self._parser.parse(\"add" ]
[ "and I want for date.\") print(\"Consumer is waiting.\") done.acquire() #", "limit the number of threads performing certain operations.For example, performing", "want for date.\") print(\"Consumer is waiting.\") done.acquire() # Waits for", "a semaphore for signaling between threads import threading import time", "and continues print(\"Consumer got\", item) t1 = threading.Thread(target=producer) t2 =", "print(\"Producer is going to sleep.\") time.sleep(5) item = \"Hello\" print(\"Producer", "threading.Thread(target=consumer) t1.start() t2.start() \"\"\" Semaphore Uses: 1. Resource control You", "-*- # sema_signal.py # # An example of using a", "between threads. For example, having one thread wake up another", "item print(\"I'm the producer and I produce data.\") print(\"Producer is", "operations.For example, performing database queries making network connections 2. Signaling", "sleep.\") time.sleep(5) item = \"Hello\" print(\"Producer is alive. Signaling the", "\"\"\" Semaphore Uses: 1. Resource control You can limit the", "print(\"Consumer got\", item) t1 = threading.Thread(target=producer) t2 = threading.Thread(target=consumer) t1.start()", "to send \"signals\" between threads. For example, having one thread", "-*- coding:utf-8 -*- # sema_signal.py # # An example of", "item = None def producer(): global item print(\"I'm the producer", "is alive. Signaling the consumer.\") done.release() # Increments the count", "for date.\") print(\"Consumer is waiting.\") done.acquire() # Waits for the", "Resource control You can limit the number of threads performing", "You can limit the number of threads performing certain operations.For", "Resource control. item = None def producer(): global item print(\"I'm", "the count and signals waiting threads def consumer(): print(\"I'm a", "print(\"I'm a consumer and I want for date.\") print(\"Consumer is", "database queries making network connections 2. Signaling Semaphores can be", "be used to send \"signals\" between threads. 
For example, having", "consumer(): print(\"I'm a consumer and I want for date.\") print(\"Consumer", "Increments the count and signals waiting threads def consumer(): print(\"I'm", "Semaphore Uses: 1. Resource control You can limit the number", "I want for date.\") print(\"Consumer is waiting.\") done.acquire() # Waits", "for the count is 0, otherwise decrements the count and", "can limit the number of threads performing certain operations.For example,", "= threading.Semaphore(0) # Resource control. item = None def producer():", "def consumer(): print(\"I'm a consumer and I want for date.\")", "data.\") print(\"Producer is going to sleep.\") time.sleep(5) item = \"Hello\"", "consumer and I want for date.\") print(\"Consumer is waiting.\") done.acquire()", "performing certain operations.For example, performing database queries making network connections", "to sleep.\") time.sleep(5) item = \"Hello\" print(\"Producer is alive. Signaling", "decrements the count and continues print(\"Consumer got\", item) t1 =", "is 0, otherwise decrements the count and continues print(\"Consumer got\",", "and I produce data.\") print(\"Producer is going to sleep.\") time.sleep(5)", "producer and I produce data.\") print(\"Producer is going to sleep.\")", "is going to sleep.\") time.sleep(5) item = \"Hello\" print(\"Producer is", "between threads import threading import time done = threading.Semaphore(0) #", "the count is 0, otherwise decrements the count and continues", "send \"signals\" between threads. For example, having one thread wake", "print(\"I'm the producer and I produce data.\") print(\"Producer is going", "# -*- coding:utf-8 -*- # sema_signal.py # # An example", "= None def producer(): global item print(\"I'm the producer and", "continues print(\"Consumer got\", item) t1 = threading.Thread(target=producer) t2 = threading.Thread(target=consumer)", "python3 # -*- coding:utf-8 -*- # sema_signal.py # # An", "queries making network connections 2. 
Signaling Semaphores can be used", "semaphore for signaling between threads import threading import time done", "producer(): global item print(\"I'm the producer and I produce data.\")", "count and signals waiting threads def consumer(): print(\"I'm a consumer", "a consumer and I want for date.\") print(\"Consumer is waiting.\")", "t2.start() \"\"\" Semaphore Uses: 1. Resource control You can limit", "the count and continues print(\"Consumer got\", item) t1 = threading.Thread(target=producer)", "print(\"Consumer is waiting.\") done.acquire() # Waits for the count is", "can be used to send \"signals\" between threads. For example,", "For example, having one thread wake up another thread \"\"\"", "import threading import time done = threading.Semaphore(0) # Resource control.", "signaling between threads import threading import time done = threading.Semaphore(0)", "An example of using a semaphore for signaling between threads", "\"Hello\" print(\"Producer is alive. Signaling the consumer.\") done.release() # Increments", "threading.Thread(target=producer) t2 = threading.Thread(target=consumer) t1.start() t2.start() \"\"\" Semaphore Uses: 1.", "coding:utf-8 -*- # sema_signal.py # # An example of using", "None def producer(): global item print(\"I'm the producer and I", "example of using a semaphore for signaling between threads import", "the consumer.\") done.release() # Increments the count and signals waiting", "produce data.\") print(\"Producer is going to sleep.\") time.sleep(5) item =", "of using a semaphore for signaling between threads import threading", "threading import time done = threading.Semaphore(0) # Resource control. item", "count and continues print(\"Consumer got\", item) t1 = threading.Thread(target=producer) t2", "= \"Hello\" print(\"Producer is alive. 
Signaling the consumer.\") done.release() #", "<reponame>Chyi341152/pyConPaper #!/usr/bin/env python3 # -*- coding:utf-8 -*- # sema_signal.py #", "I produce data.\") print(\"Producer is going to sleep.\") time.sleep(5) item", "Waits for the count is 0, otherwise decrements the count", "import time done = threading.Semaphore(0) # Resource control. item =", "connections 2. Signaling Semaphores can be used to send \"signals\"", "count is 0, otherwise decrements the count and continues print(\"Consumer", "consumer.\") done.release() # Increments the count and signals waiting threads", "# An example of using a semaphore for signaling between", "waiting.\") done.acquire() # Waits for the count is 0, otherwise", "item = \"Hello\" print(\"Producer is alive. Signaling the consumer.\") done.release()", "= threading.Thread(target=producer) t2 = threading.Thread(target=consumer) t1.start() t2.start() \"\"\" Semaphore Uses:", "Uses: 1. Resource control You can limit the number of", "the number of threads performing certain operations.For example, performing database", "threads. For example, having one thread wake up another thread", "got\", item) t1 = threading.Thread(target=producer) t2 = threading.Thread(target=consumer) t1.start() t2.start()", "# sema_signal.py # # An example of using a semaphore", "item) t1 = threading.Thread(target=producer) t2 = threading.Thread(target=consumer) t1.start() t2.start() \"\"\"", "using a semaphore for signaling between threads import threading import", "control You can limit the number of threads performing certain", "t1.start() t2.start() \"\"\" Semaphore Uses: 1. Resource control You can", "and signals waiting threads def consumer(): print(\"I'm a consumer and", "going to sleep.\") time.sleep(5) item = \"Hello\" print(\"Producer is alive.", "print(\"Producer is alive. Signaling the consumer.\") done.release() # Increments the", "making network connections 2. 
Signaling Semaphores can be used to", "date.\") print(\"Consumer is waiting.\") done.acquire() # Waits for the count", "Semaphores can be used to send \"signals\" between threads. For", "is waiting.\") done.acquire() # Waits for the count is 0,", "#!/usr/bin/env python3 # -*- coding:utf-8 -*- # sema_signal.py # #", "example, performing database queries making network connections 2. Signaling Semaphores", "0, otherwise decrements the count and continues print(\"Consumer got\", item)", "done = threading.Semaphore(0) # Resource control. item = None def", "done.release() # Increments the count and signals waiting threads def", "# Increments the count and signals waiting threads def consumer():", "time.sleep(5) item = \"Hello\" print(\"Producer is alive. Signaling the consumer.\")", "sema_signal.py # # An example of using a semaphore for", "# # An example of using a semaphore for signaling", "# Waits for the count is 0, otherwise decrements the", "Signaling the consumer.\") done.release() # Increments the count and signals", "for signaling between threads import threading import time done =", "signals waiting threads def consumer(): print(\"I'm a consumer and I", "global item print(\"I'm the producer and I produce data.\") print(\"Producer", "= threading.Thread(target=consumer) t1.start() t2.start() \"\"\" Semaphore Uses: 1. Resource control", "used to send \"signals\" between threads. For example, having one", "\"signals\" between threads. For example, having one thread wake up", "# Resource control. item = None def producer(): global item", "threads def consumer(): print(\"I'm a consumer and I want for", "the producer and I produce data.\") print(\"Producer is going to", "of threads performing certain operations.For example, performing database queries making", "1. 
Resource control You can limit the number of threads", "def producer(): global item print(\"I'm the producer and I produce", "threads import threading import time done = threading.Semaphore(0) # Resource", "Signaling Semaphores can be used to send \"signals\" between threads.", "done.acquire() # Waits for the count is 0, otherwise decrements", "threads performing certain operations.For example, performing database queries making network", "network connections 2. Signaling Semaphores can be used to send", "waiting threads def consumer(): print(\"I'm a consumer and I want", "control. item = None def producer(): global item print(\"I'm the", "otherwise decrements the count and continues print(\"Consumer got\", item) t1", "threading.Semaphore(0) # Resource control. item = None def producer(): global", "t1 = threading.Thread(target=producer) t2 = threading.Thread(target=consumer) t1.start() t2.start() \"\"\" Semaphore", "alive. Signaling the consumer.\") done.release() # Increments the count and", "t2 = threading.Thread(target=consumer) t1.start() t2.start() \"\"\" Semaphore Uses: 1. Resource", "2. Signaling Semaphores can be used to send \"signals\" between", "number of threads performing certain operations.For example, performing database queries", "performing database queries making network connections 2. Signaling Semaphores can", "time done = threading.Semaphore(0) # Resource control. item = None", "certain operations.For example, performing database queries making network connections 2." ]
[ "== 'c': V = volume else: V = extracellular_volume if", ":Copyright: 2016-2018, Karr Lab :License: MIT ''' # TODO(Arthur): IMPORTANT:", "if isinstance(model, Model): species = model.get_component_by_id(speciesId, 'species') compartment = model.get_component_by_id(compartmentId,", "= model.get_component_by_id(speciesId, 'species') compartment = model.get_component_by_id(compartmentId, 'compartments') yData = species_counts[species.index,", "2016-2018, Karr Lab :License: MIT ''' # TODO(Arthur): IMPORTANT: refactor", "wc_lang import Model, Submodel from scipy.constants import Avogadro import numpy", "pyplot.title(title) pyplot.xlabel('Time (h)') if units == 'molecules': pyplot.ylabel('Copy number') else:", "pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if len(selected_species_compartments) > 1: pyplot.legend() #save if fileName: fig.savefig(fileName)", "raise Exception('Invalid model type %s' % model.__class__.__name__) #scale if compartmentId", "yData = species_counts[species.index, compartment.index, :] elif isinstance(model, Submodel): yData =", "= 1 / Avogadro / V * 1e12 elif units", "V = volume else: V = extracellular_volume if units ==", "#update range yMin = min(yMin, np.min(yData)) yMax = max(yMax, np.max(yData))", "''): #convert time to hours time = time.copy() / 3600", "else: V = extracellular_volume if units == 'pM': scale =", "yMin = min(yMin, np.min(yData)) yMax = max(yMax, np.max(yData)) #add to", ":Author: <NAME> <<EMAIL>> :Date: 2016-03-26 :Copyright: 2016-2018, Karr Lab :License:", "= 1 / Avogadro / V * 1e0 elif units", "V * 1e6 elif units == 'mM': scale = 1", "yData in yDatas.items(): #update range yMin = min(yMin, np.min(yData)) yMax", "Exception('Invalid units \"%s\"' % units) yData *= scale yDatas[species_compartment_id] =", "selected_species_compartments: #extract data match = re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id, re.I).groupdict() speciesId =", "Lab :License: MIT 
''' # TODO(Arthur): IMPORTANT: refactor and replace", "= {}, units = 'mM', title = '', fileName =", "1 / Avogadro / V * 1e6 elif units ==", "volume else: V = extracellular_volume if units == 'pM': scale", "elif units == 'molecules': scale = 1 else: raise Exception('Invalid", "/ V * 1e0 elif units == 'molecules': scale =", "= min(yMin, np.min(yData)) yMax = max(yMax, np.max(yData)) #add to plot", "== 'M': scale = 1 / Avogadro / V *", "= volume else: V = extracellular_volume if units == 'pM':", "/ V * 1e9 elif units == 'uM': scale =", "time = np.zeros(0), species_counts = None, volume = np.zeros(0), extracellular_volume", "model.get_component_by_id(speciesId, 'species') compartment = model.get_component_by_id(compartmentId, 'compartments') yData = species_counts[species.index, compartment.index,", "title: pyplot.title(title) pyplot.xlabel('Time (h)') if units == 'molecules': pyplot.ylabel('Copy number')", "compartmentId == 'c': V = volume else: V = extracellular_volume", "1 / Avogadro / V * 1e9 elif units ==", "match = re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id, re.I).groupdict() speciesId = match['speciesId'] compartmentId =", "compartment = model.get_component_by_id(compartmentId, 'compartments') yData = species_counts[species.index, compartment.index, :] elif", "Submodel from scipy.constants import Avogadro import numpy as np import", ":] elif isinstance(model, Submodel): yData = species_counts[species_compartment_id] else: raise Exception('Invalid", "if units == 'pM': scale = 1 / Avogadro /", "scale = 1 / Avogadro / V * 1e12 elif", "elif units == 'mM': scale = 1 / Avogadro /", "1e3 elif units == 'M': scale = 1 / Avogadro", "'pM': scale = 1 / Avogadro / V * 1e12", "time.copy() / 3600 #create figure fig = pyplot.figure() #extract data", "Karr Lab :License: MIT ''' # TODO(Arthur): IMPORTANT: refactor and", "'compartments') yData = species_counts[species.index, compartment.index, :] elif isinstance(model, 
Submodel): yData", "= max(yMax, np.max(yData)) #add to plot pyplot.plot(time, yData, label=label) #set", "species_compartment_id, re.I).groupdict() speciesId = match['speciesId'] compartmentId = match['compartmentId'] if isinstance(model,", "axis limits pyplot.xlim((0, time[-1])) pyplot.ylim((yMin, yMax)) #add axis labels and", "pyplot from matplotlib import ticker from wc_lang import Model, Submodel", "match['compartmentId'] if isinstance(model, Model): species = model.get_component_by_id(speciesId, 'species') compartment =", "= 1 / Avogadro / V * 1e6 elif units", "pyplot.figure() #extract data to plot if not yDatas: yDatas =", "== 'pM': scale = 1 / Avogadro / V *", "'molecules': pyplot.ylabel('Copy number') else: pyplot.ylabel('Concentration (%s)' % units) y_formatter =", "np.zeros(0), selected_species_compartments = [], yDatas = {}, units = 'mM',", "elif units == 'uM': scale = 1 / Avogadro /", "1e12 elif units == 'nM': scale = 1 / Avogadro", "= extracellular_volume if units == 'pM': scale = 1 /", "== 'nM': scale = 1 / Avogadro / V *", "Avogadro import numpy as np import re def plot(model, time", "max(yMax, np.max(yData)) #add to plot pyplot.plot(time, yData, label=label) #set axis", "% units) y_formatter = ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if len(selected_species_compartments) > 1:", "= 1e12 yMax = -1e12 for label, yData in yDatas.items():", "plot pyplot.plot(time, yData, label=label) #set axis limits pyplot.xlim((0, time[-1])) pyplot.ylim((yMin,", "* 1e3 elif units == 'M': scale = 1 /", "else: raise Exception('Invalid units \"%s\"' % units) yData *= scale", "(h)') if units == 'molecules': pyplot.ylabel('Copy number') else: pyplot.ylabel('Concentration (%s)'", "ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if len(selected_species_compartments) > 1: pyplot.legend() #save if fileName:", "scipy.constants import Avogadro import numpy as np import re 
def", "pyplot.ylim((yMin, yMax)) #add axis labels and legend if title: pyplot.title(title)", "/ V * 1e12 elif units == 'nM': scale =", "#extract data match = re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id, re.I).groupdict() speciesId = match['speciesId']", "3600 #create figure fig = pyplot.figure() #extract data to plot", "elif isinstance(model, Submodel): yData = species_counts[species_compartment_id] else: raise Exception('Invalid model", "'M': scale = 1 / Avogadro / V * 1e0", "from scipy.constants import Avogadro import numpy as np import re", "re.I).groupdict() speciesId = match['speciesId'] compartmentId = match['compartmentId'] if isinstance(model, Model):", "1e12 yMax = -1e12 for label, yData in yDatas.items(): #update", "from matplotlib import ticker from wc_lang import Model, Submodel from", "extracellular_volume = np.zeros(0), selected_species_compartments = [], yDatas = {}, units", "extracellular_volume if units == 'pM': scale = 1 / Avogadro", "pyplot.xlabel('Time (h)') if units == 'molecules': pyplot.ylabel('Copy number') else: pyplot.ylabel('Concentration", "yDatas = {} for species_compartment_id in selected_species_compartments: #extract data match", "species_counts = None, volume = np.zeros(0), extracellular_volume = np.zeros(0), selected_species_compartments", "elif units == 'M': scale = 1 / Avogadro /", "np.max(yData)) #add to plot pyplot.plot(time, yData, label=label) #set axis limits", "import numpy as np import re def plot(model, time =", "scale = 1 else: raise Exception('Invalid units \"%s\"' % units)", "units) y_formatter = ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if len(selected_species_compartments) > 1: pyplot.legend()", "replace from matplotlib import pyplot from matplotlib import ticker from", "volume = np.zeros(0), extracellular_volume = np.zeros(0), selected_species_compartments = [], yDatas", "compartment.index, :] elif 
isinstance(model, Submodel): yData = species_counts[species_compartment_id] else: raise", "#scale if compartmentId == 'c': V = volume else: V", "scale = 1 / Avogadro / V * 1e9 elif", "axis labels and legend if title: pyplot.title(title) pyplot.xlabel('Time (h)') if", "[], yDatas = {}, units = 'mM', title = '',", "= pyplot.figure() #extract data to plot if not yDatas: yDatas", "1e9 elif units == 'uM': scale = 1 / Avogadro", "units == 'M': scale = 1 / Avogadro / V", "V * 1e3 elif units == 'M': scale = 1", "numpy as np import re def plot(model, time = np.zeros(0),", "import Avogadro import numpy as np import re def plot(model,", "1 / Avogadro / V * 1e3 elif units ==", "= '', fileName = ''): #convert time to hours time", "/ 3600 #create figure fig = pyplot.figure() #extract data to", "= 1 / Avogadro / V * 1e9 elif units", "time[-1])) pyplot.ylim((yMin, yMax)) #add axis labels and legend if title:", "= model.get_component_by_id(compartmentId, 'compartments') yData = species_counts[species.index, compartment.index, :] elif isinstance(model,", "speciesId = match['speciesId'] compartmentId = match['compartmentId'] if isinstance(model, Model): species", "= 1 else: raise Exception('Invalid units \"%s\"' % units) yData", "results yMin = 1e12 yMax = -1e12 for label, yData", "== 'uM': scale = 1 / Avogadro / V *", "scale = 1 / Avogadro / V * 1e0 elif", "range yMin = min(yMin, np.min(yData)) yMax = max(yMax, np.max(yData)) #add", "#extract data to plot if not yDatas: yDatas = {}", "{}, units = 'mM', title = '', fileName = ''):", "'''Analysis utility functions. 
:Author: <NAME> <<EMAIL>> :Date: 2016-03-26 :Copyright: 2016-2018,", "= np.zeros(0), selected_species_compartments = [], yDatas = {}, units =", "time = time.copy() / 3600 #create figure fig = pyplot.figure()", "to plot if not yDatas: yDatas = {} for species_compartment_id", "isinstance(model, Model): species = model.get_component_by_id(speciesId, 'species') compartment = model.get_component_by_id(compartmentId, 'compartments')", "IMPORTANT: refactor and replace from matplotlib import pyplot from matplotlib", "= None, volume = np.zeros(0), extracellular_volume = np.zeros(0), selected_species_compartments =", "units == 'nM': scale = 1 / Avogadro / V", "raise Exception('Invalid units \"%s\"' % units) yData *= scale yDatas[species_compartment_id]", "yMax = -1e12 for label, yData in yDatas.items(): #update range", "number') else: pyplot.ylabel('Concentration (%s)' % units) y_formatter = ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter)", "= 1 / Avogadro / V * 1e3 elif units", "units == 'pM': scale = 1 / Avogadro / V", "Avogadro / V * 1e3 elif units == 'M': scale", "== 'mM': scale = 1 / Avogadro / V *", "yDatas: yDatas = {} for species_compartment_id in selected_species_compartments: #extract data", "scale = 1 / Avogadro / V * 1e3 elif", "* 1e0 elif units == 'molecules': scale = 1 else:", "not yDatas: yDatas = {} for species_compartment_id in selected_species_compartments: #extract", "np.min(yData)) yMax = max(yMax, np.max(yData)) #add to plot pyplot.plot(time, yData,", "units == 'molecules': pyplot.ylabel('Copy number') else: pyplot.ylabel('Concentration (%s)' % units)", "#add axis labels and legend if title: pyplot.title(title) pyplot.xlabel('Time (h)')", "yDatas = {}, units = 'mM', title = '', fileName", "Avogadro / V * 1e6 elif units == 'mM': scale", "#set axis limits pyplot.xlim((0, time[-1])) pyplot.ylim((yMin, yMax)) #add axis labels", "model.get_component_by_id(compartmentId, 'compartments') yData = 
species_counts[species.index, compartment.index, :] elif isinstance(model, Submodel):", "functions. :Author: <NAME> <<EMAIL>> :Date: 2016-03-26 :Copyright: 2016-2018, Karr Lab", "1e0 elif units == 'molecules': scale = 1 else: raise", "/ V * 1e3 elif units == 'M': scale =", "'c': V = volume else: V = extracellular_volume if units", "label, yData in yDatas.items(): #update range yMin = min(yMin, np.min(yData))", "2016-03-26 :Copyright: 2016-2018, Karr Lab :License: MIT ''' # TODO(Arthur):", "Model, Submodel from scipy.constants import Avogadro import numpy as np", "match['speciesId'] compartmentId = match['compartmentId'] if isinstance(model, Model): species = model.get_component_by_id(speciesId,", "species = model.get_component_by_id(speciesId, 'species') compartment = model.get_component_by_id(compartmentId, 'compartments') yData =", "np.zeros(0), species_counts = None, volume = np.zeros(0), extracellular_volume = np.zeros(0),", "species_counts[species_compartment_id] else: raise Exception('Invalid model type %s' % model.__class__.__name__) #scale", "= ''): #convert time to hours time = time.copy() /", "model type %s' % model.__class__.__name__) #scale if compartmentId == 'c':", "units = 'mM', title = '', fileName = ''): #convert", "to plot pyplot.plot(time, yData, label=label) #set axis limits pyplot.xlim((0, time[-1]))", "{} for species_compartment_id in selected_species_compartments: #extract data match = re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$',", "<NAME> <<EMAIL>> :Date: 2016-03-26 :Copyright: 2016-2018, Karr Lab :License: MIT", "species_compartment_id in selected_species_compartments: #extract data match = re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id, re.I).groupdict()", "min(yMin, np.min(yData)) yMax = max(yMax, np.max(yData)) #add to plot pyplot.plot(time,", "yData, label=label) #set axis limits pyplot.xlim((0, time[-1])) pyplot.ylim((yMin, yMax)) #add", 
"Avogadro / V * 1e12 elif units == 'nM': scale", "re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id, re.I).groupdict() speciesId = match['speciesId'] compartmentId = match['compartmentId'] if", "'species') compartment = model.get_component_by_id(compartmentId, 'compartments') yData = species_counts[species.index, compartment.index, :]", "np.zeros(0), extracellular_volume = np.zeros(0), selected_species_compartments = [], yDatas = {},", "figure fig = pyplot.figure() #extract data to plot if not", "1 / Avogadro / V * 1e0 elif units ==", "in yDatas.items(): #update range yMin = min(yMin, np.min(yData)) yMax =", "yDatas.items(): #update range yMin = min(yMin, np.min(yData)) yMax = max(yMax,", "legend if title: pyplot.title(title) pyplot.xlabel('Time (h)') if units == 'molecules':", "fig = pyplot.figure() #extract data to plot if not yDatas:", "pyplot.ylabel('Concentration (%s)' % units) y_formatter = ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if len(selected_species_compartments)", "= np.zeros(0), extracellular_volume = np.zeros(0), selected_species_compartments = [], yDatas =", "1e6 elif units == 'mM': scale = 1 / Avogadro", "= 'mM', title = '', fileName = ''): #convert time", "% model.__class__.__name__) #scale if compartmentId == 'c': V = volume", "title = '', fileName = ''): #convert time to hours", "\"%s\"' % units) yData *= scale yDatas[species_compartment_id] = yData #plot", "units \"%s\"' % units) yData *= scale yDatas[species_compartment_id] = yData", "else: pyplot.ylabel('Concentration (%s)' % units) y_formatter = ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if", "/ Avogadro / V * 1e9 elif units == 'uM':", ":License: MIT ''' # TODO(Arthur): IMPORTANT: refactor and replace from", "V * 1e9 elif units == 'uM': scale = 1", "scale = 1 / Avogadro / V * 1e6 elif", "scale yDatas[species_compartment_id] = 
yData #plot results yMin = 1e12 yMax", "y_formatter = ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if len(selected_species_compartments) > 1: pyplot.legend() #save", "*= scale yDatas[species_compartment_id] = yData #plot results yMin = 1e12", "import ticker from wc_lang import Model, Submodel from scipy.constants import", "else: raise Exception('Invalid model type %s' % model.__class__.__name__) #scale if", "from matplotlib import pyplot from matplotlib import ticker from wc_lang", "elif units == 'nM': scale = 1 / Avogadro /", "ticker from wc_lang import Model, Submodel from scipy.constants import Avogadro", "selected_species_compartments = [], yDatas = {}, units = 'mM', title", "/ Avogadro / V * 1e12 elif units == 'nM':", "pyplot.xlim((0, time[-1])) pyplot.ylim((yMin, yMax)) #add axis labels and legend if", "'mM': scale = 1 / Avogadro / V * 1e3", "if len(selected_species_compartments) > 1: pyplot.legend() #save if fileName: fig.savefig(fileName) pyplot.close(fig)", "matplotlib import ticker from wc_lang import Model, Submodel from scipy.constants", "data to plot if not yDatas: yDatas = {} for", "isinstance(model, Submodel): yData = species_counts[species_compartment_id] else: raise Exception('Invalid model type", "to hours time = time.copy() / 3600 #create figure fig", "for species_compartment_id in selected_species_compartments: #extract data match = re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id,", "= -1e12 for label, yData in yDatas.items(): #update range yMin", "TODO(Arthur): IMPORTANT: refactor and replace from matplotlib import pyplot from", "if not yDatas: yDatas = {} for species_compartment_id in selected_species_compartments:", "label=label) #set axis limits pyplot.xlim((0, time[-1])) pyplot.ylim((yMin, yMax)) #add axis", "= ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if 
len(selected_species_compartments) > 1: pyplot.legend() #save if", "= time.copy() / 3600 #create figure fig = pyplot.figure() #extract", "''' # TODO(Arthur): IMPORTANT: refactor and replace from matplotlib import", "model.__class__.__name__) #scale if compartmentId == 'c': V = volume else:", "None, volume = np.zeros(0), extracellular_volume = np.zeros(0), selected_species_compartments = [],", "'', fileName = ''): #convert time to hours time =", "V * 1e12 elif units == 'nM': scale = 1", "yData #plot results yMin = 1e12 yMax = -1e12 for", "units == 'uM': scale = 1 / Avogadro / V", "V * 1e0 elif units == 'molecules': scale = 1", "labels and legend if title: pyplot.title(title) pyplot.xlabel('Time (h)') if units", "%s' % model.__class__.__name__) #scale if compartmentId == 'c': V =", "units) yData *= scale yDatas[species_compartment_id] = yData #plot results yMin", "yMin = 1e12 yMax = -1e12 for label, yData in", "units == 'molecules': scale = 1 else: raise Exception('Invalid units", "if compartmentId == 'c': V = volume else: V =", "V = extracellular_volume if units == 'pM': scale = 1", "type %s' % model.__class__.__name__) #scale if compartmentId == 'c': V", "plot if not yDatas: yDatas = {} for species_compartment_id in", "1 else: raise Exception('Invalid units \"%s\"' % units) yData *=", "if title: pyplot.title(title) pyplot.xlabel('Time (h)') if units == 'molecules': pyplot.ylabel('Copy", "if units == 'molecules': pyplot.ylabel('Copy number') else: pyplot.ylabel('Concentration (%s)' %", "#add to plot pyplot.plot(time, yData, label=label) #set axis limits pyplot.xlim((0,", "yMax = max(yMax, np.max(yData)) #add to plot pyplot.plot(time, yData, label=label)", "utility functions. 
:Author: <NAME> <<EMAIL>> :Date: 2016-03-26 :Copyright: 2016-2018, Karr", "1 / Avogadro / V * 1e12 elif units ==", "/ Avogadro / V * 1e0 elif units == 'molecules':", "species_counts[species.index, compartment.index, :] elif isinstance(model, Submodel): yData = species_counts[species_compartment_id] else:", "/ V * 1e6 elif units == 'mM': scale =", "import Model, Submodel from scipy.constants import Avogadro import numpy as", "'mM', title = '', fileName = ''): #convert time to", "#plot results yMin = 1e12 yMax = -1e12 for label,", "hours time = time.copy() / 3600 #create figure fig =", "= re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id, re.I).groupdict() speciesId = match['speciesId'] compartmentId = match['compartmentId']", "time to hours time = time.copy() / 3600 #create figure", "# TODO(Arthur): IMPORTANT: refactor and replace from matplotlib import pyplot", "= species_counts[species.index, compartment.index, :] elif isinstance(model, Submodel): yData = species_counts[species_compartment_id]", "'nM': scale = 1 / Avogadro / V * 1e9", "and legend if title: pyplot.title(title) pyplot.xlabel('Time (h)') if units ==", "limits pyplot.xlim((0, time[-1])) pyplot.ylim((yMin, yMax)) #add axis labels and legend", "yData *= scale yDatas[species_compartment_id] = yData #plot results yMin =", "#convert time to hours time = time.copy() / 3600 #create", "* 1e6 elif units == 'mM': scale = 1 /", "MIT ''' # TODO(Arthur): IMPORTANT: refactor and replace from matplotlib", "data match = re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id, re.I).groupdict() speciesId = match['speciesId'] compartmentId", "re def plot(model, time = np.zeros(0), species_counts = None, volume", "for label, yData in yDatas.items(): #update range yMin = min(yMin,", "-1e12 for label, yData in yDatas.items(): #update range yMin =", "% units) yData *= scale yDatas[species_compartment_id] = yData #plot 
results", "= yData #plot results yMin = 1e12 yMax = -1e12", "(%s)' % units) y_formatter = ticker.ScalarFormatter(useOffset=False) pyplot.gca().get_yaxis().set_major_formatter(y_formatter) if len(selected_species_compartments) >", "matplotlib import pyplot from matplotlib import ticker from wc_lang import", "compartmentId = match['compartmentId'] if isinstance(model, Model): species = model.get_component_by_id(speciesId, 'species')", "/ Avogadro / V * 1e6 elif units == 'mM':", "<<EMAIL>> :Date: 2016-03-26 :Copyright: 2016-2018, Karr Lab :License: MIT '''", "= match['speciesId'] compartmentId = match['compartmentId'] if isinstance(model, Model): species =", "refactor and replace from matplotlib import pyplot from matplotlib import", "as np import re def plot(model, time = np.zeros(0), species_counts", "'uM': scale = 1 / Avogadro / V * 1e6", "pyplot.plot(time, yData, label=label) #set axis limits pyplot.xlim((0, time[-1])) pyplot.ylim((yMin, yMax))", "fileName = ''): #convert time to hours time = time.copy()", "np import re def plot(model, time = np.zeros(0), species_counts =", "Model): species = model.get_component_by_id(speciesId, 'species') compartment = model.get_component_by_id(compartmentId, 'compartments') yData", "== 'molecules': scale = 1 else: raise Exception('Invalid units \"%s\"'", "== 'molecules': pyplot.ylabel('Copy number') else: pyplot.ylabel('Concentration (%s)' % units) y_formatter", ":Date: 2016-03-26 :Copyright: 2016-2018, Karr Lab :License: MIT ''' #", "plot(model, time = np.zeros(0), species_counts = None, volume = np.zeros(0),", "= species_counts[species_compartment_id] else: raise Exception('Invalid model type %s' % model.__class__.__name__)", "units == 'mM': scale = 1 / Avogadro / V", "#create figure fig = pyplot.figure() #extract data to plot if", "= [], yDatas = {}, units = 'mM', title =", "from wc_lang import Model, Submodel from scipy.constants import Avogadro import", "/ Avogadro / V * 1e3 elif units == 'M':", "= {} for 
species_compartment_id in selected_species_compartments: #extract data match =", "yMax)) #add axis labels and legend if title: pyplot.title(title) pyplot.xlabel('Time", "Exception('Invalid model type %s' % model.__class__.__name__) #scale if compartmentId ==", "pyplot.ylabel('Copy number') else: pyplot.ylabel('Concentration (%s)' % units) y_formatter = ticker.ScalarFormatter(useOffset=False)", "* 1e12 elif units == 'nM': scale = 1 /", "import re def plot(model, time = np.zeros(0), species_counts = None,", "def plot(model, time = np.zeros(0), species_counts = None, volume =", "'molecules': scale = 1 else: raise Exception('Invalid units \"%s\"' %", "yDatas[species_compartment_id] = yData #plot results yMin = 1e12 yMax =", "import pyplot from matplotlib import ticker from wc_lang import Model,", "= match['compartmentId'] if isinstance(model, Model): species = model.get_component_by_id(speciesId, 'species') compartment", "and replace from matplotlib import pyplot from matplotlib import ticker", "Submodel): yData = species_counts[species_compartment_id] else: raise Exception('Invalid model type %s'", "* 1e9 elif units == 'uM': scale = 1 /", "Avogadro / V * 1e9 elif units == 'uM': scale", "Avogadro / V * 1e0 elif units == 'molecules': scale", "= np.zeros(0), species_counts = None, volume = np.zeros(0), extracellular_volume =", "in selected_species_compartments: #extract data match = re.match('^(?P<speciesId>[a-z0-9\\-_]+)\\[(?P<compartmentId>[a-z0-9\\-_]+)\\]$', species_compartment_id, re.I).groupdict() speciesId", "yData = species_counts[species_compartment_id] else: raise Exception('Invalid model type %s' %" ]
[ "readme = readme_file.read() requirements = [ 'xgboost>=0.90', 'catboost>=0.26', 'bayesian-optimization>=1.2.0', 'numpy>=1.19.5',", "setup, find_packages with open(\"README.md\", \"r\") as readme_file: readme = readme_file.read()", "'seaborn>=0.11.1', 'plotly>=4.4.1', 'pyyaml>=5.4.1' ] setup( name=\"bonsai-tree\", version=\"1.2\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Bayesian", "long_description=readme, url=\"https://github.com/magi-1/bonsai\", packages=find_packages(), package_data={'': ['*.yml']}, install_requires=requirements, license = 'MIT', classifiers=[", "version=\"1.2\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Bayesian Optimization + Gradient Boosted Trees\", long_description=readme,", "= 'MIT', classifiers=[ \"Programming Language :: Python :: 3.6\", \"Programming", "3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language ::", "'MIT', classifiers=[ \"Programming Language :: Python :: 3.6\", \"Programming Language", "= [ 'xgboost>=0.90', 'catboost>=0.26', 'bayesian-optimization>=1.2.0', 'numpy>=1.19.5', 'pandas>=1.1.5', 'matplotlib>=3.2.2', 'seaborn>=0.11.1', 'plotly>=4.4.1',", "find_packages with open(\"README.md\", \"r\") as readme_file: readme = readme_file.read() requirements", "Gradient Boosted Trees\", long_description=readme, url=\"https://github.com/magi-1/bonsai\", packages=find_packages(), package_data={'': ['*.yml']}, install_requires=requirements, license", "as readme_file: readme = readme_file.read() requirements = [ 'xgboost>=0.90', 'catboost>=0.26',", "['*.yml']}, install_requires=requirements, license = 'MIT', classifiers=[ \"Programming Language :: Python", "\"Programming Language :: Python :: 3.7\", \"Programming Language :: Python", "Language :: Python :: 3.7\", \"Programming Language :: Python ::", "Boosted Trees\", long_description=readme, url=\"https://github.com/magi-1/bonsai\", packages=find_packages(), package_data={'': ['*.yml']}, install_requires=requirements, license 
=", "Language :: Python :: 3.6\", \"Programming Language :: Python ::", "license = 'MIT', classifiers=[ \"Programming Language :: Python :: 3.6\",", ":: Python :: 3.7\", \"Programming Language :: Python :: 3.8\",", "open(\"README.md\", \"r\") as readme_file: readme = readme_file.read() requirements = [", "\"r\") as readme_file: readme = readme_file.read() requirements = [ 'xgboost>=0.90',", "with open(\"README.md\", \"r\") as readme_file: readme = readme_file.read() requirements =", "packages=find_packages(), package_data={'': ['*.yml']}, install_requires=requirements, license = 'MIT', classifiers=[ \"Programming Language", "Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming", "name=\"bonsai-tree\", version=\"1.2\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Bayesian Optimization + Gradient Boosted Trees\",", "] setup( name=\"bonsai-tree\", version=\"1.2\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Bayesian Optimization + Gradient", "install_requires=requirements, license = 'MIT', classifiers=[ \"Programming Language :: Python ::", "'bayesian-optimization>=1.2.0', 'numpy>=1.19.5', 'pandas>=1.1.5', 'matplotlib>=3.2.2', 'seaborn>=0.11.1', 'plotly>=4.4.1', 'pyyaml>=5.4.1' ] setup( name=\"bonsai-tree\",", "'pandas>=1.1.5', 'matplotlib>=3.2.2', 'seaborn>=0.11.1', 'plotly>=4.4.1', 'pyyaml>=5.4.1' ] setup( name=\"bonsai-tree\", version=\"1.2\", author=\"<NAME>\",", "+ Gradient Boosted Trees\", long_description=readme, url=\"https://github.com/magi-1/bonsai\", packages=find_packages(), package_data={'': ['*.yml']}, install_requires=requirements,", "package_data={'': ['*.yml']}, install_requires=requirements, license = 'MIT', classifiers=[ \"Programming Language ::", "Python :: 3.7\", \"Programming Language :: Python :: 3.8\", ],", "\"Programming Language :: Python :: 3.6\", \"Programming Language :: Python", "description=\"Bayesian Optimization + Gradient Boosted Trees\", long_description=readme, 
url=\"https://github.com/magi-1/bonsai\", packages=find_packages(), package_data={'':", "Optimization + Gradient Boosted Trees\", long_description=readme, url=\"https://github.com/magi-1/bonsai\", packages=find_packages(), package_data={'': ['*.yml']},", "'plotly>=4.4.1', 'pyyaml>=5.4.1' ] setup( name=\"bonsai-tree\", version=\"1.2\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Bayesian Optimization", "requirements = [ 'xgboost>=0.90', 'catboost>=0.26', 'bayesian-optimization>=1.2.0', 'numpy>=1.19.5', 'pandas>=1.1.5', 'matplotlib>=3.2.2', 'seaborn>=0.11.1',", "import setup, find_packages with open(\"README.md\", \"r\") as readme_file: readme =", "author_email=\"<EMAIL>\", description=\"Bayesian Optimization + Gradient Boosted Trees\", long_description=readme, url=\"https://github.com/magi-1/bonsai\", packages=find_packages(),", "from setuptools import setup, find_packages with open(\"README.md\", \"r\") as readme_file:", "readme_file.read() requirements = [ 'xgboost>=0.90', 'catboost>=0.26', 'bayesian-optimization>=1.2.0', 'numpy>=1.19.5', 'pandas>=1.1.5', 'matplotlib>=3.2.2',", "url=\"https://github.com/magi-1/bonsai\", packages=find_packages(), package_data={'': ['*.yml']}, install_requires=requirements, license = 'MIT', classifiers=[ \"Programming", "classifiers=[ \"Programming Language :: Python :: 3.6\", \"Programming Language ::", "author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Bayesian Optimization + Gradient Boosted Trees\", long_description=readme, url=\"https://github.com/magi-1/bonsai\",", "readme_file: readme = readme_file.read() requirements = [ 'xgboost>=0.90', 'catboost>=0.26', 'bayesian-optimization>=1.2.0',", "setuptools import setup, find_packages with open(\"README.md\", \"r\") as readme_file: readme", "'xgboost>=0.90', 'catboost>=0.26', 'bayesian-optimization>=1.2.0', 'numpy>=1.19.5', 'pandas>=1.1.5', 'matplotlib>=3.2.2', 'seaborn>=0.11.1', 'plotly>=4.4.1', 'pyyaml>=5.4.1' ]", "'matplotlib>=3.2.2', 'seaborn>=0.11.1', 
'plotly>=4.4.1', 'pyyaml>=5.4.1' ] setup( name=\"bonsai-tree\", version=\"1.2\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "'pyyaml>=5.4.1' ] setup( name=\"bonsai-tree\", version=\"1.2\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Bayesian Optimization +", ":: 3.7\", \"Programming Language :: Python :: 3.8\", ], )", "setup( name=\"bonsai-tree\", version=\"1.2\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Bayesian Optimization + Gradient Boosted", "= readme_file.read() requirements = [ 'xgboost>=0.90', 'catboost>=0.26', 'bayesian-optimization>=1.2.0', 'numpy>=1.19.5', 'pandas>=1.1.5',", ":: Python :: 3.6\", \"Programming Language :: Python :: 3.7\",", "[ 'xgboost>=0.90', 'catboost>=0.26', 'bayesian-optimization>=1.2.0', 'numpy>=1.19.5', 'pandas>=1.1.5', 'matplotlib>=3.2.2', 'seaborn>=0.11.1', 'plotly>=4.4.1', 'pyyaml>=5.4.1'", "<gh_stars>10-100 from setuptools import setup, find_packages with open(\"README.md\", \"r\") as", "Trees\", long_description=readme, url=\"https://github.com/magi-1/bonsai\", packages=find_packages(), package_data={'': ['*.yml']}, install_requires=requirements, license = 'MIT',", "'numpy>=1.19.5', 'pandas>=1.1.5', 'matplotlib>=3.2.2', 'seaborn>=0.11.1', 'plotly>=4.4.1', 'pyyaml>=5.4.1' ] setup( name=\"bonsai-tree\", version=\"1.2\",", ":: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language", "'catboost>=0.26', 'bayesian-optimization>=1.2.0', 'numpy>=1.19.5', 'pandas>=1.1.5', 'matplotlib>=3.2.2', 'seaborn>=0.11.1', 'plotly>=4.4.1', 'pyyaml>=5.4.1' ] setup(" ]
[ "version with open('version.yml', 'r') as f: version = yaml.safe_load(f.read()) #", "yaml.safe_load(f.read()) # Strip \"dev\" out of micro version['micro'] = int(str(version['micro']).replace('dev',", "'dev' + str(version['micro']) # Output version with open('version.yml', 'w') as", "in version with open('version.yml', 'r') as f: version = yaml.safe_load(f.read())", "version string to pathogen/_version.py with open('pathogen/version.py', 'w') as f: f.write(\"__version__", "version['micro'] += 1 # Add \"dev\" back to patch if", "version['micro'] = int(str(version['micro']).replace('dev', '')) # Update patch version['micro'] += 1", "to pathogen/_version.py with open('pathogen/version.py', 'w') as f: f.write(\"__version__ = '{}'\\n\".format(version))", "<<EMAIL>> \"\"\" import yaml # Read in version with open('version.yml',", "version dict to string version = '.'.join([str(version[key]) for key in", "patch version['micro'] += 1 # Add \"dev\" back to patch", "1 # Add \"dev\" back to patch if version['micro'] !=", "out of micro version['micro'] = int(str(version['micro']).replace('dev', '')) # Update patch", "\"\"\" increment_version.py written in Python3 author: <NAME> <<EMAIL>> \"\"\" import", "open('version.yml', 'w') as f: yaml.safe_dump(version, f, sort_keys=False) # Transform version", "version['micro'] != 0: version['micro'] = 'dev' + str(version['micro']) # Output", "\"\"\" import yaml # Read in version with open('version.yml', 'r')", "Output version with open('version.yml', 'w') as f: yaml.safe_dump(version, f, sort_keys=False)", "!= 0: version['micro'] = 'dev' + str(version['micro']) # Output version", "Strip \"dev\" out of micro version['micro'] = int(str(version['micro']).replace('dev', '')) #", "to string version = '.'.join([str(version[key]) for key in ['major', 'minor',", "# Read in version with open('version.yml', 'r') as f: version", "as f: version = yaml.safe_load(f.read()) # Strip \"dev\" out of", "increment_version.py written in Python3 author: 
<NAME> <<EMAIL>> \"\"\" import yaml", "# Transform version dict to string version = '.'.join([str(version[key]) for", "open('pathogen/version.py', 'w') as f: f.write(\"__version__ = '{}'\\n\".format(version)) # Return print(version)", "'r') as f: version = yaml.safe_load(f.read()) # Strip \"dev\" out", "with open('pathogen/version.py', 'w') as f: f.write(\"__version__ = '{}'\\n\".format(version)) # Return", "Add \"dev\" back to patch if version['micro'] != 0: version['micro']", "with open('version.yml', 'r') as f: version = yaml.safe_load(f.read()) # Strip", "string to pathogen/_version.py with open('pathogen/version.py', 'w') as f: f.write(\"__version__ =", "= '.'.join([str(version[key]) for key in ['major', 'minor', 'micro']]) # Write", "author: <NAME> <<EMAIL>> \"\"\" import yaml # Read in version", "back to patch if version['micro'] != 0: version['micro'] = 'dev'", "f: version = yaml.safe_load(f.read()) # Strip \"dev\" out of micro", "['major', 'minor', 'micro']]) # Write version string to pathogen/_version.py with", "yaml.safe_dump(version, f, sort_keys=False) # Transform version dict to string version", "'minor', 'micro']]) # Write version string to pathogen/_version.py with open('pathogen/version.py',", "Write version string to pathogen/_version.py with open('pathogen/version.py', 'w') as f:", "f, sort_keys=False) # Transform version dict to string version =", "of micro version['micro'] = int(str(version['micro']).replace('dev', '')) # Update patch version['micro']", "# Strip \"dev\" out of micro version['micro'] = int(str(version['micro']).replace('dev', ''))", "<gh_stars>0 \"\"\" increment_version.py written in Python3 author: <NAME> <<EMAIL>> \"\"\"", "written in Python3 author: <NAME> <<EMAIL>> \"\"\" import yaml #", "# Add \"dev\" back to patch if version['micro'] != 0:", "patch if version['micro'] != 0: version['micro'] = 'dev' + str(version['micro'])", "'')) # Update patch version['micro'] += 1 # Add \"dev\"", "+= 1 # Add \"dev\" back to patch if 
version['micro']", "Python3 author: <NAME> <<EMAIL>> \"\"\" import yaml # Read in", "'w') as f: yaml.safe_dump(version, f, sort_keys=False) # Transform version dict", "pathogen/_version.py with open('pathogen/version.py', 'w') as f: f.write(\"__version__ = '{}'\\n\".format(version)) #", "Transform version dict to string version = '.'.join([str(version[key]) for key", "0: version['micro'] = 'dev' + str(version['micro']) # Output version with", "\"dev\" back to patch if version['micro'] != 0: version['micro'] =", "f: yaml.safe_dump(version, f, sort_keys=False) # Transform version dict to string", "int(str(version['micro']).replace('dev', '')) # Update patch version['micro'] += 1 # Add", "with open('version.yml', 'w') as f: yaml.safe_dump(version, f, sort_keys=False) # Transform", "'micro']]) # Write version string to pathogen/_version.py with open('pathogen/version.py', 'w')", "# Output version with open('version.yml', 'w') as f: yaml.safe_dump(version, f,", "to patch if version['micro'] != 0: version['micro'] = 'dev' +", "= int(str(version['micro']).replace('dev', '')) # Update patch version['micro'] += 1 #", "Read in version with open('version.yml', 'r') as f: version =", "in Python3 author: <NAME> <<EMAIL>> \"\"\" import yaml # Read", "= yaml.safe_load(f.read()) # Strip \"dev\" out of micro version['micro'] =", "sort_keys=False) # Transform version dict to string version = '.'.join([str(version[key])", "version['micro'] = 'dev' + str(version['micro']) # Output version with open('version.yml',", "+ str(version['micro']) # Output version with open('version.yml', 'w') as f:", "key in ['major', 'minor', 'micro']]) # Write version string to", "# Write version string to pathogen/_version.py with open('pathogen/version.py', 'w') as", "open('version.yml', 'r') as f: version = yaml.safe_load(f.read()) # Strip \"dev\"", "version with open('version.yml', 'w') as f: yaml.safe_dump(version, f, sort_keys=False) #", "'.'.join([str(version[key]) for key in ['major', 'minor', 
'micro']]) # Write version", "\"dev\" out of micro version['micro'] = int(str(version['micro']).replace('dev', '')) # Update", "version = '.'.join([str(version[key]) for key in ['major', 'minor', 'micro']]) #", "yaml # Read in version with open('version.yml', 'r') as f:", "if version['micro'] != 0: version['micro'] = 'dev' + str(version['micro']) #", "for key in ['major', 'minor', 'micro']]) # Write version string", "micro version['micro'] = int(str(version['micro']).replace('dev', '')) # Update patch version['micro'] +=", "<NAME> <<EMAIL>> \"\"\" import yaml # Read in version with", "dict to string version = '.'.join([str(version[key]) for key in ['major',", "import yaml # Read in version with open('version.yml', 'r') as", "str(version['micro']) # Output version with open('version.yml', 'w') as f: yaml.safe_dump(version,", "as f: yaml.safe_dump(version, f, sort_keys=False) # Transform version dict to", "= 'dev' + str(version['micro']) # Output version with open('version.yml', 'w')", "in ['major', 'minor', 'micro']]) # Write version string to pathogen/_version.py", "Update patch version['micro'] += 1 # Add \"dev\" back to", "# Update patch version['micro'] += 1 # Add \"dev\" back", "version = yaml.safe_load(f.read()) # Strip \"dev\" out of micro version['micro']", "string version = '.'.join([str(version[key]) for key in ['major', 'minor', 'micro']])" ]
[ "= cls._index_ibis_table(data) if selection_mask.dtype == numpy.dtype(\"bool\"): selection_mask = numpy.where(selection_mask)[0] data", "ibis version %s.\" % ibis.__version__ ) if \"hv_row_id__\" in data.columns:", "+= [data.hv_row_id__ >= rows.start] if rows.stop: predicates += [data.hv_row_id__ <", "data.filter(data.hv_row_id__ == rows)[columns] .head(1) .execute() .iloc[0, 0] ) if isinstance(rows,", "predicate = p else: predicate &= p if predicates is", "mask, mask_value=numpy.nan): raise NotImplementedError('Mask is not implemented for IbisInterface.') @classmethod", "nvdim if nvdim else None]) return data, dict(kdims=keys, vdims=values), {}", "Ibis table column, \" \"expecting either ibis.Expr or scalar.\" %", "== 1: return data[dataset.vdims[0].name].execute().iloc[0] return data @classmethod def select_mask(cls, dataset,", "0] @classmethod def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): #", "elif keys == [] and values is None: values =", "predicates.append(column < bound) elif isinstance(object, (set, list)): # rowid conditions", "ibis.expr.operations.Mean, numpy.std: ibis.expr.operations.StandardDev, numpy.nanstd: ibis.expr.operations.StandardDev, numpy.sum: ibis.expr.operations.Sum, numpy.nansum: ibis.expr.operations.Sum, numpy.var:", "cls.compute(dataset) @classmethod @cached def length(self, dataset): # Get the length", "binned = expr.bucket(bins).name('bucket') hist = numpy.zeros(len(bins)-1) hist_bins = binned.value_counts().sort_by('bucket').execute() for", "numpy.dtype(\"bool\"): selection_mask = numpy.where(selection_mask)[0] data = data.filter( data[\"hv_row_id__\"].isin(list(map(int, selection_mask))) ).drop([\"hv_row_id__\"])", "# rowid conditions condition = None for id in object:", "import Interface from . 
import pandas from .util import cached", "numpy.ndarray): data = cls._index_ibis_table(data) if selection_mask.dtype == numpy.dtype(\"bool\"): selection_mask =", "enumerate(sample): p = data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v)) if predicate is None:", "= [x for x in values if x not in", "continue hist[int(b)] = v if weights is not None: raise", "the necesary dimensions index_dims = [dataset.get_dimension(d, strict=True) for d in", "numpy.isscalar(columns): columns = [dataset.get_dimension(columns).name] else: columns = [dataset.get_dimension(d).name for d", ") dropped = [x for x in values if x", "None: keys = list(data.columns[:ndim]) if values is None: values =", "else: aggregation = selection.aggregate( **{ x: function(new[x]).to_expr() for x in", "@classmethod def applies(cls, obj): if not cls.loaded(): return False from", "where=new[x] != 0).to_expr() for x in new.columns if x not", "index_dims] # execute a query against the table to find", "dimensions, function, **kwargs): import ibis.expr.operations data = dataset.data columns =", "None, None if dimension.nodata is not None: return Interface.range(dataset, dimension)", "None for x in (rows.start, rows.stop, rows.step)): predicates = []", "key for key in data.columns[ndim : ((ndim + nvdim) if", "= numpy.where(selection_mask)[0] data = data.filter( data[\"hv_row_id__\"].isin(list(map(int, selection_mask))) ).drop([\"hv_row_id__\"]) elif selection_mask", "(backend,) = validate_backends(list(find_backends(data))) except Exception: backend = data._find_backend() return type(backend).__module__", "in columns } ) else: aggregation = new.aggregate( **{x: function(new[x]).to_expr()", "== [] and values is None: values = list(data.columns[: nvdim", "is needed until ibis updates versions @classmethod def has_rowid(cls): import", "[int(v) if bins.dtype.kind in 'iu' else float(v) for v in", "samples] return data[data[dims[0].name].isin(items)] predicates = None for sample in samples:", "else: 
predicates.append(column == object) return predicates @classmethod def sample(cls, dataset,", "data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return data.drop([\"hv_row_id__\"]) @classmethod def unpack_scalar(cls, dataset, data): \"\"\" Given", "else condition | predicate ) if condition is not None:", "not (isinstance(selection_mask, list) and not selection_mask): data = data.filter(selection_mask) if", "values, vdim): import ibis data = dataset.data if dimension.name not", "values = list(data.columns[: nvdim if nvdim else None]) return data,", "elif keys is None: keys = list(data.columns[:ndim]) if values is", "ibis.expr.operations.Variance, numpy.nanvar: ibis.expr.operations.Variance, len: ibis.expr.operations.Count, }.get(function, function) if len(dimensions): selection", "def init(cls, eltype, data, keys, values): params = eltype.param.objects() index", "dataset, dimension, expanded=True, flat=True, compute=True, keep_index=False, ): dimension = dataset.get_dimension(dimension,", "len(dataset.data.columns) @classmethod @cached def dtype(cls, dataset, dimension): dimension = dataset.get_dimension(dimension)", "if predicates is None: predicates = predicate else: predicates |=", "@classmethod @cached def isscalar(cls, dataset, dim): return ( dataset.data[dataset.get_dimension(dim, strict=True).name]", "find the unique groups. 
groups = dataset.data.groupby(group_by).aggregate().execute() # filter each", "bins.dtype.kind in 'iu' else float(v) for v in bins] binned", "in (rows.start, rows.stop, rows.step)): predicates = [] if rows.start: predicates", "keep_index or not compute else data.execute().values @classmethod def histogram(cls, expr,", "isinstance(columns, slice): columns = [x.name for x in dataset.dimensions()[columns]] elif", ") for i, s in groups.iterrows() ] if issubclass(container_type, NdMapping):", ".count() .compute() == 1 ) @classmethod def select(cls, dataset, selection_mask=None,", "in data.columns: if not isinstance(values, ibis.Expr) and not numpy.isscalar(values): raise", "None: return Interface.range(dataset, dimension) column = dataset.data[dimension.name] return tuple( dataset.data.aggregate([column.min(),", "nonzero(cls, dataset): # Make an empty query to see if", "on ibis if any(x is not None for x in", "params[\"kdims\"] columns = params[\"vdims\"] if isinstance(index.bounds[1], int): ndim = min([index.bounds[1],", "Exception: backend = data._find_backend() return type(backend).__module__ in cls.zero_indexed_backend_modules @classmethod def", "selection_mask.dtype == numpy.dtype(\"bool\"): selection_mask = numpy.where(selection_mask)[0] data = data.filter( data[\"hv_row_id__\"].isin(list(map(int,", "filter each group based on the predicate defined. data =", "still awaiting # a pr on ibis if any(x is", "v in dimensions.items()} ) validate = pandas.PandasInterface.validate reindex = pandas.PandasInterface.reindex", "): dimension = dataset.get_dimension(dimension, strict=True) data = dataset.data[dimension.name] if not", "object = slice(*object) alias = dataset.get_dimension(dim).name column = dataset.data[alias] if", "False from ibis.expr.types import Expr return isinstance(obj, Expr) @classmethod def", "query to see if a row is returned. 
return bool(len(dataset.data[[]].head(1).execute()))", "if isinstance(columns, slice): columns = [x.name for x in dataset.dimensions()[columns]]", "for k, v in s.to_dict().items()] ), **group_kwargs ), ) for", "data.filter( data[\"hv_row_id__\"].isin(list(map(int, selection_mask))) ).drop([\"hv_row_id__\"]) elif selection_mask is not None and", "is not None: predicates.append(condition) elif callable(object): predicates.append(object(column)) elif isinstance(object, ibis.Expr):", "rows.start: predicates += [data.hv_row_id__ >= rows.start] if rows.stop: predicates +=", "dataset, selection): import ibis predicates = [] for dim, object", "0] ) if isinstance(rows, slice): # We should use a", "predicates.append(condition) elif callable(object): predicates.append(object(column)) elif isinstance(object, ibis.Expr): predicates.append(object) else: predicates.append(column", "else data.execute().values @classmethod def histogram(cls, expr, bins, density=True, weights=None): bins", "hist = numpy.zeros(len(bins)-1) hist_bins = binned.value_counts().sort_by('bucket').execute() for b, v in", "ibis data = dataset.data if dimension.name not in data.columns: if", "= data[columns + values] function = { numpy.min: ibis.expr.operations.Min, numpy.nanmin:", "= [ key for key in data.columns[ndim : ((ndim +", "aggregation = selection.aggregate( **{ x: ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr() for", "data = cls._index_ibis_table(dataset.data[columns]) if scalar: return ( data.filter(data.hv_row_id__ == rows)[columns]", "data = data.filter( data[\"hv_row_id__\"].isin(list(map(int, selection_mask))) ).drop([\"hv_row_id__\"]) elif selection_mask is not", "x in new.columns if x not in columns } )", "for x in by]) @classmethod def redim(cls, dataset, dimensions): return", "assign %s type as a Ibis table column, \" \"expecting", "import ibis data = dataset.data if dimension.name not in data.columns:", "util.numpy_scalar_to_python(object.stop) predicates.append(column < 
bound) elif isinstance(object, (set, list)): # rowid", "= cls._index_ibis_table(dataset.data[columns]) if scalar: return ( data.filter(data.hv_row_id__ == rows)[columns] .head(1)", "scalar.\" % type(values).__name__) data = data.mutate(**{dimension.name: values}) return data @classmethod", "id condition = ( predicate if condition is None else", "is None else condition | predicate ) if condition is", "if isinstance(index.bounds[1], int): ndim = min([index.bounds[1], len(index.default)]) else: ndim =", "Given a dataset object and data in the appropriate format", "Iterable): rows = [rows] return data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return data.drop([\"hv_row_id__\"]) @classmethod def", "def dtype(cls, dataset, dimension): dimension = dataset.get_dimension(dimension) return dataset.data.head(0).execute().dtypes[dimension.name] dimension_type", "values = [c for c in data.columns if c not", "= list(data.columns[: nvdim if nvdim else None]) return data, dict(kdims=keys,", "return data if keep_index or not compute else data.execute().values @classmethod", "def has_rowid(cls): import ibis.expr.operations return hasattr(ibis.expr.operations, \"RowID\") @classmethod def is_rowid_zero_indexed(cls,", "else: predicate &= p if predicates is None: predicates =", "loaded(cls): return \"ibis\" in sys.modules @classmethod def applies(cls, obj): if", "columns } ) else: aggregation = selection.aggregate( **{ x: function(new[x]).to_expr()", "ndim = min([index.bounds[1], len(index.default)]) else: ndim = None nvdim =", "ibis.Expr) and not numpy.isscalar(values): raise ValueError(\"Cannot assign %s type as", "Get the length by counting the length of an empty", "dimension, expanded=True, flat=True, compute=True, keep_index=False, ): dimension = dataset.get_dimension(dimension, strict=True)", "data.columns if c not in keys] elif values and keys", "query against the table to find the unique groups. 
groups", "dataset.get_dimension(dimension, strict=True) data = dataset.data[dimension.name] if not expanded: data =", "not selection_mask): data = data.filter(selection_mask) if indexed and data.count().execute() ==", "for the interface, return a simple scalar. \"\"\" if len(data.columns)", "predicates @classmethod def sample(cls, dataset, samples=[]): import ibis dims =", "in dimensions.items()} ) validate = pandas.PandasInterface.validate reindex = pandas.PandasInterface.reindex @classmethod", "dataset, dimensions): return dataset.data.mutate( **{v.name: dataset.data[k] for k, v in", "dataset.data[dimension.name] return tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :] ) @classmethod @cached def", "reverse=False): return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for x in by]) @classmethod", "def sample(cls, dataset, samples=[]): import ibis dims = dataset.dimensions() data", "and data in the appropriate format for the interface, return", "index = params[\"kdims\"] columns = params[\"vdims\"] if isinstance(index.bounds[1], int): ndim", "None if keys and values is None: values = [c", "= { numpy.min: ibis.expr.operations.Min, numpy.nanmin: ibis.expr.operations.Min, numpy.max: ibis.expr.operations.Max, numpy.nanmax: ibis.expr.operations.Max,", "return \"ibis\" in sys.modules @classmethod def applies(cls, obj): if not", "= [dataset.get_dimension(columns).name] else: columns = [dataset.get_dimension(d).name for d in columns]", "= dataset.dataset group_by = [d.name for d in index_dims] #", "for x in new.columns if x not in columns }", "dataset, dim): return ( dataset.data[dataset.get_dimension(dim, strict=True).name] .distinct() .count() .compute() ==", "columns = params[\"vdims\"] if isinstance(index.bounds[1], int): ndim = min([index.bounds[1], len(index.default)])", "not None: predicates.append(condition) elif callable(object): predicates.append(object(column)) elif isinstance(object, ibis.Expr): 
predicates.append(object)", "new.columns if x not in columns } ) else: aggregation", "group_type( dataset.data.filter( [dataset.data[k] == v for k, v in s.to_dict().items()]", "1: return data[dataset.vdims[0].name].execute().iloc[0] return data @classmethod def select_mask(cls, dataset, selection):", "samples): items = [s[0] if isinstance(s, tuple) else s for", "keys = list(data.columns[:ndim]) if values is None: values = [", "= [d.name for d in dataset.kdims if d in dimensions]", ") else: aggregation = new.aggregate( **{x: function(new[x]).to_expr() for x in", "== 1 ) @classmethod def select(cls, dataset, selection_mask=None, **selection): if", "None: selection_mask = cls.select_mask(dataset, selection) indexed = cls.indexed(dataset, selection) data", "ibis.expr.operations.Mean, numpy.nanmean: ibis.expr.operations.Mean, numpy.std: ibis.expr.operations.StandardDev, numpy.nanstd: ibis.expr.operations.StandardDev, numpy.sum: ibis.expr.operations.Sum, numpy.nansum:", "if not isinstance(rows, Iterable): rows = [rows] return data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return", "= dict(util.get_param_values(dataset), kdims=element_dims) group_kwargs.update(kwargs) group_kwargs[\"dataset\"] = dataset.dataset group_by = [d.name", "reverse) for x in by]) @classmethod def redim(cls, dataset, dimensions):", "ibis.expr.operations.Sum, numpy.nansum: ibis.expr.operations.Sum, numpy.var: ibis.expr.operations.Variance, numpy.nanvar: ibis.expr.operations.Variance, len: ibis.expr.operations.Count, }.get(function,", "else: return container_type(data) @classmethod def assign(cls, dataset, new_data): return dataset.data.mutate(**new_data)", "@classmethod def compute(cls, dataset): return dataset.clone(dataset.data.execute()) @classmethod def persist(cls, dataset):", ".distinct() .count() .compute() == 1 ) @classmethod def select(cls, dataset,", "issubclass(group_type, Element): group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims) 
group_kwargs.update(kwargs) group_kwargs[\"dataset\"] = dataset.dataset", "[data.hv_row_id__ >= rows.start] if rows.stop: predicates += [data.hv_row_id__ < rows.stop]", "slice(*object) alias = dataset.get_dimension(dim).name column = dataset.data[alias] if isinstance(object, slice):", "continue predicate = None for i, v in enumerate(sample): p", "selection = new.groupby(columns) if function is numpy.count_nonzero: aggregation = selection.aggregate(", "predicates is None: predicates = predicate else: predicates |= predicate", "dataset, dimension): dimension = dataset.get_dimension(dimension) return dataset.data.head(0).execute().dtypes[dimension.name] dimension_type = dtype", ". import pandas from .util import cached class IbisInterface(Interface): types", "not reverse) for x in by]) @classmethod def redim(cls, dataset,", "Interface.range(dataset, dimension) column = dataset.data[dimension.name] return tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :]", "@classmethod def is_rowid_zero_indexed(cls, data): try: from ibis.client import find_backends, validate_backends", "dataset): return cls.length(dataset), len(dataset.data.columns) @classmethod @cached def dtype(cls, dataset, dimension):", "version %s.\" % ibis.__version__ ) if \"hv_row_id__\" in data.columns: return", "format for the interface, return a simple scalar. 
\"\"\" if", "dropped = [x for x in values if x not", "def histogram(cls, expr, bins, density=True, weights=None): bins = numpy.asarray(bins) bins", "@classmethod def unpack_scalar(cls, dataset, data): \"\"\" Given a dataset object", "= cls.indexed(dataset, selection) data = dataset.data if isinstance(selection_mask, numpy.ndarray): data", "= ( predicate if condition is None else condition |", "= dataset.get_dimension(dimension, strict=True) if cls.dtype(dataset, dimension).kind in 'SUO': return None,", "0).to_expr() for x in new.columns if x not in columns", "NotImplementedError('Mask is not implemented for IbisInterface.') @classmethod @cached def dframe(cls,", "v if weights is not None: raise NotImplementedError(\"Weighted histograms currently", "predicates.append(bound <= column) if object.stop is not None: bound =", "ibis.expr.operations data = dataset.data columns = [d.name for d in", "numpy try: from collections.abc import Iterable except ImportError: from collections", "= [ ( tuple(s.values.tolist()), group_type( dataset.data.filter( [dataset.data[k] == v for", "data @classmethod def select_mask(cls, dataset, selection): import ibis predicates =", "None else data.filter(predicates) @classmethod def aggregate(cls, dataset, dimensions, function, **kwargs):", "dataset.dimensions() data = dataset.data if all(util.isscalar(s) or len(s) == 1", "= [sample] if not sample: continue predicate = None for", "and not (isinstance(selection_mask, list) and not selection_mask): data = data.filter(selection_mask)", "in data.columns] return aggregation, dropped @classmethod @cached def mask(cls, dataset,", "weights=None): bins = numpy.asarray(bins) bins = [int(v) if bins.dtype.kind in", "@classmethod def add_dimension(cls, dataset, dimension, dim_pos, values, vdim): import ibis", "return dataset.data.head(0).execute().dtypes[dimension.name] dimension_type = dtype @classmethod def sort(cls, dataset, by=[],", "[rows] return 
data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return data.drop([\"hv_row_id__\"]) @classmethod def unpack_scalar(cls, dataset, data):", "predicate if condition is None else condition | predicate )", "ibis if not cls.has_rowid(): raise ValueError( \"iloc expressions are not", "for d in dimensions] element_dims = [kdim for kdim in", "new_data): return dataset.data.mutate(**new_data) @classmethod def add_dimension(cls, dataset, dimension, dim_pos, values,", "dataset): # Get the length by counting the length of", "for kdim in dataset.kdims if kdim not in index_dims] group_kwargs", "return isinstance(obj, Expr) @classmethod def init(cls, eltype, data, keys, values):", "dataset): return cls.compute(dataset) @classmethod @cached def length(self, dataset): # Get", "appropriate format for the interface, return a simple scalar. \"\"\"", "return bool(len(dataset.data[[]].head(1).execute())) @classmethod @cached def range(cls, dataset, dimension): dimension =", "= [ 'ibis.backends.omniscidb.client', ] # the rowid is needed until", "None if dimension.nodata is not None: return Interface.range(dataset, dimension) column", "\"\"\" if len(data.columns) > 1 or data[[]].count().execute() != 1: return", "= [s[0] if isinstance(s, tuple) else s for s in", "aggregation = selection.aggregate( **{ x: function(new[x]).to_expr() for x in new.columns", "dimensions index_dims = [dataset.get_dimension(d, strict=True) for d in dimensions] element_dims", "_index_ibis_table(cls, data): import ibis if not cls.has_rowid(): raise ValueError( \"iloc", "dataset.get_dimension(dimension, strict=True) if cls.dtype(dataset, dimension).kind in 'SUO': return None, None", "the length of an empty query. 
return dataset.data[[]].count().execute() @classmethod @cached", "(set, list)): # rowid conditions condition = None for id", "hist[int(b)] = v if weights is not None: raise NotImplementedError(\"Weighted", "in data.columns if c not in values][:ndim] elif keys is", "**{ x: ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr() for x in new.columns", "implemented for IbisInterface.\") if density: hist = hist/expr.count().execute() return hist,", "or len(s) == 1 for s in samples): items =", "callable(object): predicates.append(object(column)) elif isinstance(object, ibis.Expr): predicates.append(object) else: predicates.append(column == object)", "if nvdim else None]) return data, dict(kdims=keys, vdims=values), {} @classmethod", "and len(dataset.vdims) == 1: return data[dataset.vdims[0].name].execute().iloc[0] return data @classmethod def", "in dataset.kdims if kdim not in index_dims] group_kwargs = {}", "numpy.asarray(bins) bins = [int(v) if bins.dtype.kind in 'iu' else float(v)", "as a Ibis table column, \" \"expecting either ibis.Expr or", "data.columns: if not isinstance(values, ibis.Expr) and not numpy.isscalar(values): raise ValueError(\"Cannot", "= dtype @classmethod def sort(cls, dataset, by=[], reverse=False): return dataset.data.sort_by([(dataset.get_dimension(x).name,", "by=[], reverse=False): return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for x in by])", "Make an empty query to see if a row is", "values}) return data @classmethod @cached def isscalar(cls, dataset, dim): return", "pr on ibis if any(x is not None for x", "function(new[x]).to_expr() for x in new.columns if x not in columns", "not None for x in (rows.start, rows.stop, rows.step)): predicates =", "ibis.expr.types import Expr return isinstance(obj, Expr) @classmethod def init(cls, eltype,", "numpy.nanvar: ibis.expr.operations.Variance, len: ibis.expr.operations.Count, }.get(function, function) if len(dimensions): selection =", "and not numpy.isscalar(values): 
raise ValueError(\"Cannot assign %s type as a", "the unique groups. groups = dataset.data.groupby(group_by).aggregate().execute() # filter each group", "if not sample: continue predicate = None for i, v", "@classmethod @cached def range(cls, dataset, dimension): dimension = dataset.get_dimension(dimension, strict=True)", "kdims=index_dims) else: return container_type(data) @classmethod def assign(cls, dataset, new_data): return", "None: predicate = p else: predicate &= p if predicates", "numpy.nanmin: ibis.expr.operations.Min, numpy.max: ibis.expr.operations.Max, numpy.nanmax: ibis.expr.operations.Max, numpy.mean: ibis.expr.operations.Mean, numpy.nanmean: ibis.expr.operations.Mean,", "a dataset object and data in the appropriate format for", "persist(cls, dataset): return cls.compute(dataset) @classmethod @cached def length(self, dataset): #", "def select_mask(cls, dataset, selection): import ibis predicates = [] for", "\" \"not implemented for IbisInterface.\") if density: hist = hist/expr.count().execute()", "is None: keys = [c for c in data.columns if", "def applies(cls, obj): if not cls.loaded(): return False from ibis.expr.types", "scalar. 
\"\"\" if len(data.columns) > 1 or data[[]].count().execute() != 1:", "in dimensions] element_dims = [kdim for kdim in dataset.kdims if", "in object: predicate = column == id condition = (", "= selection.aggregate( **{ x: function(new[x]).to_expr() for x in new.columns if", "samples=[]): import ibis dims = dataset.dimensions() data = dataset.data if", "isinstance(rows, Iterable): rows = [rows] return data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return data.drop([\"hv_row_id__\"]) @classmethod", "data = dataset.data if isinstance(selection_mask, numpy.ndarray): data = cls._index_ibis_table(data) if", "data @classmethod @cached def isscalar(cls, dataset, dim): return ( dataset.data[dataset.get_dimension(dim,", "in cls.zero_indexed_backend_modules @classmethod def loaded(cls): return \"ibis\" in sys.modules @classmethod", "import numpy try: from collections.abc import Iterable except ImportError: from", "data.filter(selection_mask) if indexed and data.count().execute() == 1 and len(dataset.vdims) ==", "def redim(cls, dataset, dimensions): return dataset.data.mutate( **{v.name: dataset.data[k] for k,", "@classmethod @cached def mask(cls, dataset, mask, mask_value=numpy.nan): raise NotImplementedError('Mask is", "expr, bins, density=True, weights=None): bins = numpy.asarray(bins) bins = [int(v)", "in by]) @classmethod def redim(cls, dataset, dimensions): return dataset.data.mutate( **{v.name:", "[c for c in data.columns if c not in keys]", "data.drop([\"hv_row_id__\"]) @classmethod def unpack_scalar(cls, dataset, data): \"\"\" Given a dataset", "None]) return data, dict(kdims=keys, vdims=values), {} @classmethod def compute(cls, dataset):", "None for id in object: predicate = column == id", "dimensions] values = dataset.dimensions(\"value\", label=\"name\") new = data[columns + values]", "dataset): return dataset.clone(dataset.data.execute()) @classmethod def persist(cls, dataset): return cls.compute(dataset) @classmethod", "[d.name for d in 
index_dims] # execute a query against", "= column == id condition = ( predicate if condition", "dataset, dimensions, container_type, group_type, **kwargs): # aggregate the necesary dimensions", "cls.zero_indexed_backend_modules @classmethod def loaded(cls): return \"ibis\" in sys.modules @classmethod def", "selection.aggregate( **{ x: ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr() for x in", "zero_indexed_backend_modules = [ 'ibis.backends.omniscidb.client', ] # the rowid is needed", "[sample] if not sample: continue predicate = None for i,", "dimension): dimension = dataset.get_dimension(dimension, strict=True) if cls.dtype(dataset, dimension).kind in 'SUO':", "return ( dataset.data[dataset.get_dimension(dim, strict=True).name] .distinct() .count() .compute() == 1 )", "return data.mutate(hv_row_id__=data.rowid() - 1) @classmethod def iloc(cls, dataset, index): rows,", "[kdim for kdim in dataset.kdims if kdim not in index_dims]", "\"ibis\" default_partitions = 100 zero_indexed_backend_modules = [ 'ibis.backends.omniscidb.client', ] #", "((ndim + nvdim) if nvdim else None)] if key not", "group based on the predicate defined. 
data = [ (", "strict=True) if cls.dtype(dataset, dimension).kind in 'SUO': return None, None if", "if function is numpy.count_nonzero: aggregation = selection.aggregate( **{ x: ibis.expr.operations.Count(new[x],", "not isinstance(rows, Iterable): rows = [rows] return data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return data.drop([\"hv_row_id__\"])", "column == id condition = ( predicate if condition is", "if isinstance(object, slice): if object.start is not None: # Workaround", "= dataset.data[dimension.name] if not expanded: data = data.distinct() return data", "keys is None: keys = list(data.columns[:ndim]) if values is None:", "**group_kwargs ), ) for i, s in groups.iterrows() ] if", "ibis.client import find_backends, validate_backends (backend,) = validate_backends(list(find_backends(data))) except Exception: backend", "= params[\"vdims\"] if isinstance(index.bounds[1], int): ndim = min([index.bounds[1], len(index.default)]) else:", "indexed = cls.indexed(dataset, selection) data = dataset.data if isinstance(selection_mask, numpy.ndarray):", "= [rows] return data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return data.drop([\"hv_row_id__\"]) @classmethod def unpack_scalar(cls, dataset,", ": ((ndim + nvdim) if nvdim else None)] if key", "not in index_dims] group_kwargs = {} if group_type != \"raw\"", "sample in samples: if util.isscalar(sample): sample = [sample] if not", "dataset, dimensions, function, **kwargs): import ibis.expr.operations data = dataset.data columns", "None nvdim = columns.bounds[1] if isinstance(columns.bounds[1], int) else None if", "= p else: predicate &= p if predicates is None:", "(isinstance(selection_mask, list) and not selection_mask): data = data.filter(selection_mask) if indexed", "each group based on the predicate defined. 
data = [", "IbisInterface.') @classmethod @cached def dframe(cls, dataset, dimensions): return dataset.data[dimensions].execute() Interface.register(IbisInterface)", "data[data[dims[0].name].isin(items)] predicates = None for sample in samples: if util.isscalar(sample):", "data): try: from ibis.client import find_backends, validate_backends (backend,) = validate_backends(list(find_backends(data)))", "range(cls, dataset, dimension): dimension = dataset.get_dimension(dimension, strict=True) if cls.dtype(dataset, dimension).kind", "@classmethod def select(cls, dataset, selection_mask=None, **selection): if selection_mask is None:", "mask(cls, dataset, mask, mask_value=numpy.nan): raise NotImplementedError('Mask is not implemented for", "not numpy.isscalar(values): raise ValueError(\"Cannot assign %s type as a Ibis", "currently \" \"not implemented for IbisInterface.\") if density: hist =", "values if x not in data.columns] return aggregation, dropped @classmethod", "numpy.nanmax: ibis.expr.operations.Max, numpy.mean: ibis.expr.operations.Mean, numpy.nanmean: ibis.expr.operations.Mean, numpy.std: ibis.expr.operations.StandardDev, numpy.nanstd: ibis.expr.operations.StandardDev,", "not in keys ] elif keys == [] and values", "dataset.dimensions()[columns]] elif numpy.isscalar(columns): columns = [dataset.get_dimension(columns).name] else: columns = [dataset.get_dimension(d).name", "the table to find the unique groups. 
groups = dataset.data.groupby(group_by).aggregate().execute()", "data = dataset.data[dimension.name] if not expanded: data = data.distinct() return", "== 1 and len(dataset.vdims) == 1: return data[dataset.vdims[0].name].execute().iloc[0] return data", "in dataset.kdims if d in dimensions] values = dataset.dimensions(\"value\", label=\"name\")", "list) and not selection_mask): data = data.filter(selection_mask) if indexed and", "not in data.columns: if not isinstance(values, ibis.Expr) and not numpy.isscalar(values):", "aggregate(cls, dataset, dimensions, function, **kwargs): import ibis.expr.operations data = dataset.data", "return data.mutate(hv_row_id__=data.rowid()) else: return data.mutate(hv_row_id__=data.rowid() - 1) @classmethod def iloc(cls,", "() datatype = \"ibis\" default_partitions = 100 zero_indexed_backend_modules = [", "kdim not in index_dims] group_kwargs = {} if group_type !=", "import Element from ..ndmapping import NdMapping, item_check, sorted_context from .interface", "if not expanded: data = data.distinct() return data if keep_index", "dropped @classmethod @cached def mask(cls, dataset, mask, mask_value=numpy.nan): raise NotImplementedError('Mask", "not cls.has_rowid(): raise ValueError( \"iloc expressions are not supported for", "), ) for i, s in groups.iterrows() ] if issubclass(container_type,", "def loaded(cls): return \"ibis\" in sys.modules @classmethod def applies(cls, obj):", "numpy.nansum: ibis.expr.operations.Sum, numpy.var: ibis.expr.operations.Variance, numpy.nanvar: ibis.expr.operations.Variance, len: ibis.expr.operations.Count, }.get(function, function)", "if group_type != \"raw\" and issubclass(group_type, Element): group_kwargs = dict(util.get_param_values(dataset),", "pandas from .util import cached class IbisInterface(Interface): types = ()", "init(cls, eltype, data, keys, values): params = eltype.param.objects() index =", "def persist(cls, dataset): return cls.compute(dataset) @classmethod @cached def length(self, 
dataset):", "[x for x in values if x not in data.columns]", "return data @classmethod @cached def isscalar(cls, dataset, dim): return (", "hasattr(ibis.expr.operations, \"RowID\") @classmethod def is_rowid_zero_indexed(cls, data): try: from ibis.client import", "in samples): items = [s[0] if isinstance(s, tuple) else s", "elif callable(object): predicates.append(object(column)) elif isinstance(object, ibis.Expr): predicates.append(object) else: predicates.append(column ==", "ibis.Expr or scalar.\" % type(values).__name__) data = data.mutate(**{dimension.name: values}) return", "if condition is not None: predicates.append(condition) elif callable(object): predicates.append(object(column)) elif", "or not compute else data.execute().values @classmethod def histogram(cls, expr, bins,", ") if \"hv_row_id__\" in data.columns: return data if cls.is_rowid_zero_indexed(data): return", "ibis.__version__ ) if \"hv_row_id__\" in data.columns: return data if cls.is_rowid_zero_indexed(data):", "if bins.dtype.kind in 'iu' else float(v) for v in bins]", "None: values = [ key for key in data.columns[ndim :", "len: ibis.expr.operations.Count, }.get(function, function) if len(dimensions): selection = new.groupby(columns) if", "values and keys is None: keys = [c for c", "number but i think that is still awaiting # a", "= [d.name for d in index_dims] # execute a query", "expr.bucket(bins).name('bucket') hist = numpy.zeros(len(bins)-1) hist_bins = binned.value_counts().sort_by('bucket').execute() for b, v", "@cached def dtype(cls, dataset, dimension): dimension = dataset.get_dimension(dimension) return dataset.data.head(0).execute().dtypes[dimension.name]", "for dim, object in selection.items(): if isinstance(object, tuple): object =", "label=\"name\") new = data[columns + values] function = { numpy.min:", "IbisInterface(Interface): types = () datatype = \"ibis\" default_partitions = 100", "empty query to see if a row is returned. return", "the interface, return a simple scalar. 
\"\"\" if len(data.columns) >", "in sys.modules @classmethod def applies(cls, obj): if not cls.loaded(): return", "assign(cls, dataset, new_data): return dataset.data.mutate(**new_data) @classmethod def add_dimension(cls, dataset, dimension,", "object and data in the appropriate format for the interface,", "for IbisInterface.') @classmethod @cached def dframe(cls, dataset, dimensions): return dataset.data[dimensions].execute()", "cls.has_rowid(): raise ValueError( \"iloc expressions are not supported for ibis", "..ndmapping import NdMapping, item_check, sorted_context from .interface import Interface from", "the predicate defined. data = [ ( tuple(s.values.tolist()), group_type( dataset.data.filter(", "is None: predicate = p else: predicate &= p if", "k, v in dimensions.items()} ) validate = pandas.PandasInterface.validate reindex =", "if isinstance(s, tuple) else s for s in samples] return", ".compute() == 1 ) @classmethod def select(cls, dataset, selection_mask=None, **selection):", "x in (rows.start, rows.stop, rows.step)): predicates = [] if rows.start:", "by]) @classmethod def redim(cls, dataset, dimensions): return dataset.data.mutate( **{v.name: dataset.data[k]", "# Make an empty query to see if a row", "to find the unique groups. 
groups = dataset.data.groupby(group_by).aggregate().execute() # filter", "**{v.name: dataset.data[k] for k, v in dimensions.items()} ) validate =", "return data.filter(predicates).drop([\"hv_row_id__\"]) else: if not isinstance(rows, Iterable): rows = [rows]", "def add_dimension(cls, dataset, dimension, dim_pos, values, vdim): import ibis data", "data = dataset.data if all(util.isscalar(s) or len(s) == 1 for", "index)) if isinstance(columns, slice): columns = [x.name for x in", "else: ndim = None nvdim = columns.bounds[1] if isinstance(columns.bounds[1], int)", "dtype @classmethod def sort(cls, dataset, by=[], reverse=False): return dataset.data.sort_by([(dataset.get_dimension(x).name, not", "from collections.abc import Iterable except ImportError: from collections import Iterable", "function = { numpy.min: ibis.expr.operations.Min, numpy.nanmin: ibis.expr.operations.Min, numpy.max: ibis.expr.operations.Max, numpy.nanmax:", "for id in object: predicate = column == id condition", "return dataset.data[[]].count().execute() @classmethod @cached def nonzero(cls, dataset): # Make an", "% ibis.__version__ ) if \"hv_row_id__\" in data.columns: return data if", "sample(cls, dataset, samples=[]): import ibis dims = dataset.dimensions() data =", "@cached def values( cls, dataset, dimension, expanded=True, flat=True, compute=True, keep_index=False,", "shape(cls, dataset): return cls.length(dataset), len(dataset.data.columns) @classmethod @cached def dtype(cls, dataset,", "tuple(s.values.tolist()), group_type( dataset.data.filter( [dataset.data[k] == v for k, v in", "import ibis dims = dataset.dimensions() data = dataset.data if all(util.isscalar(s)", "dict(kdims=keys, vdims=values), {} @classmethod def compute(cls, dataset): return dataset.clone(dataset.data.execute()) @classmethod", "None)] if key not in keys ] elif keys ==", ").drop([\"hv_row_id__\"]) elif selection_mask is not None and not (isinstance(selection_mask, list)", "data return data.execute().iat[0, 0] 
@classmethod def groupby(cls, dataset, dimensions, container_type,", "empty query. return dataset.data[[]].count().execute() @classmethod @cached def nonzero(cls, dataset): #", "numpy.mean: ibis.expr.operations.Mean, numpy.nanmean: ibis.expr.operations.Mean, numpy.std: ibis.expr.operations.StandardDev, numpy.nanstd: ibis.expr.operations.StandardDev, numpy.sum: ibis.expr.operations.Sum,", "values][:ndim] elif keys is None: keys = list(data.columns[:ndim]) if values", "elif isinstance(object, ibis.Expr): predicates.append(object) else: predicates.append(column == object) return predicates", "predicates = [] for dim, object in selection.items(): if isinstance(object,", "= pandas.PandasInterface.validate reindex = pandas.PandasInterface.reindex @classmethod def _index_ibis_table(cls, data): import", "columns] data = cls._index_ibis_table(dataset.data[columns]) if scalar: return ( data.filter(data.hv_row_id__ ==", "[] if rows.start: predicates += [data.hv_row_id__ >= rows.start] if rows.stop:", "group_kwargs = {} if group_type != \"raw\" and issubclass(group_type, Element):", "for s in samples): items = [s[0] if isinstance(s, tuple)", "@classmethod def iloc(cls, dataset, index): rows, columns = index scalar", "ndim = None nvdim = columns.bounds[1] if isinstance(columns.bounds[1], int) else", "from . 
import pandas from .util import cached class IbisInterface(Interface):", "dataset): # Make an empty query to see if a", "necesary dimensions index_dims = [dataset.get_dimension(d, strict=True) for d in dimensions]", "for c in data.columns if c not in keys] elif", "keys and values is None: values = [c for c", "aggregate the necesary dimensions index_dims = [dataset.get_dimension(d, strict=True) for d", "data.columns if c not in values][:ndim] elif keys is None:", "dataset.data.filter( [dataset.data[k] == v for k, v in s.to_dict().items()] ),", "in data.columns: return data if cls.is_rowid_zero_indexed(data): return data.mutate(hv_row_id__=data.rowid()) else: return", "tuple) else s for s in samples] return data[data[dims[0].name].isin(items)] predicates", "data.columns: return data if cls.is_rowid_zero_indexed(data): return data.mutate(hv_row_id__=data.rowid()) else: return data.mutate(hv_row_id__=data.rowid()", "rows.stop, rows.step)): predicates = [] if rows.start: predicates += [data.hv_row_id__", "unpack_scalar(cls, dataset, data): \"\"\" Given a dataset object and data", "# aggregate the necesary dimensions index_dims = [dataset.get_dimension(d, strict=True) for", "compute else data.execute().values @classmethod def histogram(cls, expr, bins, density=True, weights=None):", "<= column) if object.stop is not None: bound = util.numpy_scalar_to_python(object.stop)", "predicates.append(column == object) return predicates @classmethod def sample(cls, dataset, samples=[]):", "= expr.bucket(bins).name('bucket') hist = numpy.zeros(len(bins)-1) hist_bins = binned.value_counts().sort_by('bucket').execute() for b,", "= [x.name for x in dataset.dimensions()[columns]] elif numpy.isscalar(columns): columns =", "group_type != \"raw\" and issubclass(group_type, Element): group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims)", "params = eltype.param.objects() index = params[\"kdims\"] columns = params[\"vdims\"] if", "if isinstance(selection_mask, 
numpy.ndarray): data = cls._index_ibis_table(data) if selection_mask.dtype == numpy.dtype(\"bool\"):", "return dataset.data.mutate(**new_data) @classmethod def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):", "None: predicates.append(condition) elif callable(object): predicates.append(object(column)) elif isinstance(object, ibis.Expr): predicates.append(object) else:", "in keys ] elif keys == [] and values is", "not supported for ibis version %s.\" % ibis.__version__ ) if", "!= 0).to_expr() for x in new.columns if x not in", "cached class IbisInterface(Interface): types = () datatype = \"ibis\" default_partitions", "in bins] binned = expr.bucket(bins).name('bucket') hist = numpy.zeros(len(bins)-1) hist_bins =", "len(data.columns) > 1 or data[[]].count().execute() != 1: return data return", "= None for sample in samples: if util.isscalar(sample): sample =", "= [dataset.get_dimension(d).name for d in columns] data = cls._index_ibis_table(dataset.data[columns]) if", "dimensions, container_type, group_type, **kwargs): # aggregate the necesary dimensions index_dims", "= pandas.PandasInterface.reindex @classmethod def _index_ibis_table(cls, data): import ibis if not", "= dataset.get_dimension(dimension, strict=True) data = dataset.data[dimension.name] if not expanded: data", "@classmethod def histogram(cls, expr, bins, density=True, weights=None): bins = numpy.asarray(bins)", "v in bins] binned = expr.bucket(bins).name('bucket') hist = numpy.zeros(len(bins)-1) hist_bins", "column, \" \"expecting either ibis.Expr or scalar.\" % type(values).__name__) data", "type(backend).__module__ in cls.zero_indexed_backend_modules @classmethod def loaded(cls): return \"ibis\" in sys.modules", "class IbisInterface(Interface): types = () datatype = \"ibis\" default_partitions =", "isinstance(object, (set, list)): # rowid conditions condition = None for", "not None: return Interface.range(dataset, dimension) column = dataset.data[dimension.name] return tuple(", "an empty 
query. return dataset.data[[]].count().execute() @classmethod @cached def nonzero(cls, dataset):", "new.aggregate( **{x: function(new[x]).to_expr() for x in new.columns} ) dropped =", "- 1) @classmethod def iloc(cls, dataset, index): rows, columns =", "for x in new.columns} ) dropped = [x for x", "iloc(cls, dataset, index): rows, columns = index scalar = all(map(util.isscalar,", "None for i, v in enumerate(sample): p = data[dims[i].name] ==", "for dask issue #3392 bound = util.numpy_scalar_to_python(object.start) predicates.append(bound <= column)", "cls.length(dataset), len(dataset.data.columns) @classmethod @cached def dtype(cls, dataset, dimension): dimension =", "in keys] elif values and keys is None: keys =", "'iu' else float(v) for v in bins] binned = expr.bucket(bins).name('bucket')", "def iloc(cls, dataset, index): rows, columns = index scalar =", "is None: values = list(data.columns[: nvdim if nvdim else None])", "d in dimensions] element_dims = [kdim for kdim in dataset.kdims", "compute(cls, dataset): return dataset.clone(dataset.data.execute()) @classmethod def persist(cls, dataset): return cls.compute(dataset)", "sample = [sample] if not sample: continue predicate = None", "ValueError(\"Cannot assign %s type as a Ibis table column, \"", ".interface import Interface from . import pandas from .util import", "return data if cls.is_rowid_zero_indexed(data): return data.mutate(hv_row_id__=data.rowid()) else: return data.mutate(hv_row_id__=data.rowid() -", "**kwargs): import ibis.expr.operations data = dataset.data columns = [d.name for", "data.execute().values @classmethod def histogram(cls, expr, bins, density=True, weights=None): bins =", "c not in values][:ndim] elif keys is None: keys =", "datatype = \"ibis\" default_partitions = 100 zero_indexed_backend_modules = [ 'ibis.backends.omniscidb.client',", "is not None: raise NotImplementedError(\"Weighted histograms currently \" \"not implemented", "from collections import Iterable from .. 
import util from ..element", "None and not (isinstance(selection_mask, list) and not selection_mask): data =", "group_kwargs[\"dataset\"] = dataset.dataset group_by = [d.name for d in index_dims]", "dataset, mask, mask_value=numpy.nan): raise NotImplementedError('Mask is not implemented for IbisInterface.')", "if c not in values][:ndim] elif keys is None: keys", "isinstance(values, ibis.Expr) and not numpy.isscalar(values): raise ValueError(\"Cannot assign %s type", "import sys import numpy try: from collections.abc import Iterable except", "sorted_context from .interface import Interface from . import pandas from", "the rowid is needed until ibis updates versions @classmethod def", "bins = numpy.asarray(bins) bins = [int(v) if bins.dtype.kind in 'iu'", "for b, v in zip(hist_bins['bucket'], hist_bins['count']): if numpy.isnan(b): continue hist[int(b)]", "implemented for IbisInterface.') @classmethod @cached def dframe(cls, dataset, dimensions): return", "predicates is None else data.filter(predicates) @classmethod def aggregate(cls, dataset, dimensions,", "s for s in samples] return data[data[dims[0].name].isin(items)] predicates = None", "key in data.columns[ndim : ((ndim + nvdim) if nvdim else", "for i, s in groups.iterrows() ] if issubclass(container_type, NdMapping): with", "dimension = dataset.get_dimension(dimension) return dataset.data.head(0).execute().dtypes[dimension.name] dimension_type = dtype @classmethod def", "nvdim = columns.bounds[1] if isinstance(columns.bounds[1], int) else None if keys", "a Ibis table column, \" \"expecting either ibis.Expr or scalar.\"", "else: columns = [dataset.get_dimension(d).name for d in columns] data =", "if predicates is None else data.filter(predicates) @classmethod def aggregate(cls, dataset,", "x in by]) @classmethod def redim(cls, dataset, dimensions): return dataset.data.mutate(", "= new.groupby(columns) if function is numpy.count_nonzero: aggregation = selection.aggregate( **{", "data.mutate(**{dimension.name: 
values}) return data @classmethod @cached def isscalar(cls, dataset, dim):", "ibis.expr.operations.Variance, len: ibis.expr.operations.Count, }.get(function, function) if len(dimensions): selection = new.groupby(columns)", "collections import Iterable from .. import util from ..element import", "data.count().execute() == 1 and len(dataset.vdims) == 1: return data[dataset.vdims[0].name].execute().iloc[0] return", "predicates = None for sample in samples: if util.isscalar(sample): sample", "def assign(cls, dataset, new_data): return dataset.data.mutate(**new_data) @classmethod def add_dimension(cls, dataset,", "!= \"raw\" and issubclass(group_type, Element): group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims) group_kwargs.update(kwargs)", "ibis.expr.operations.StandardDev, numpy.sum: ibis.expr.operations.Sum, numpy.nansum: ibis.expr.operations.Sum, numpy.var: ibis.expr.operations.Variance, numpy.nanvar: ibis.expr.operations.Variance, len:", "@cached def shape(cls, dataset): return cls.length(dataset), len(dataset.data.columns) @classmethod @cached def", "dim, object in selection.items(): if isinstance(object, tuple): object = slice(*object)", "needed until ibis updates versions @classmethod def has_rowid(cls): import ibis.expr.operations", "if object.stop is not None: bound = util.numpy_scalar_to_python(object.stop) predicates.append(column <", "not in data.columns] return aggregation, dropped @classmethod @cached def mask(cls,", "x not in data.columns] return aggregation, dropped @classmethod @cached def", "validate_backends (backend,) = validate_backends(list(find_backends(data))) except Exception: backend = data._find_backend() return", "of an empty query. 
return dataset.data[[]].count().execute() @classmethod @cached def nonzero(cls,", "for x in (rows.start, rows.stop, rows.step)): predicates = [] if", "if not cls.has_rowid(): raise ValueError( \"iloc expressions are not supported", "NotImplementedError(\"Weighted histograms currently \" \"not implemented for IbisInterface.\") if density:", "type(values).__name__) data = data.mutate(**{dimension.name: values}) return data @classmethod @cached def", "ibis.expr.operations.Count, }.get(function, function) if len(dimensions): selection = new.groupby(columns) if function", "not cls.loaded(): return False from ibis.expr.types import Expr return isinstance(obj,", "for d in dataset.kdims if d in dimensions] values =", "index): rows, columns = index scalar = all(map(util.isscalar, index)) if", "is None else data.filter(predicates) @classmethod def aggregate(cls, dataset, dimensions, function,", "or scalar.\" % type(values).__name__) data = data.mutate(**{dimension.name: values}) return data", "Iterable from .. 
import util from ..element import Element from", "def isscalar(cls, dataset, dim): return ( dataset.data[dataset.get_dimension(dim, strict=True).name] .distinct() .count()", "are not supported for ibis version %s.\" % ibis.__version__ )", "1: return data return data.execute().iat[0, 0] @classmethod def groupby(cls, dataset,", "dimension): dimension = dataset.get_dimension(dimension) return dataset.data.head(0).execute().dtypes[dimension.name] dimension_type = dtype @classmethod", "v in enumerate(sample): p = data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v)) if predicate", "else: aggregation = new.aggregate( **{x: function(new[x]).to_expr() for x in new.columns}", "import util from ..element import Element from ..ndmapping import NdMapping,", "is None: values = [c for c in data.columns if", "execute a query against the table to find the unique", "in enumerate(sample): p = data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v)) if predicate is", "rows.stop] return data.filter(predicates).drop([\"hv_row_id__\"]) else: if not isinstance(rows, Iterable): rows =", "not isinstance(values, ibis.Expr) and not numpy.isscalar(values): raise ValueError(\"Cannot assign %s", "import Expr return isinstance(obj, Expr) @classmethod def init(cls, eltype, data,", "for sample in samples: if util.isscalar(sample): sample = [sample] if", "groups.iterrows() ] if issubclass(container_type, NdMapping): with item_check(False), sorted_context(False): return container_type(data,", "in selection.items(): if isinstance(object, tuple): object = slice(*object) alias =", "b, v in zip(hist_bins['bucket'], hist_bins['count']): if numpy.isnan(b): continue hist[int(b)] =", "{} @classmethod def compute(cls, dataset): return dataset.clone(dataset.data.execute()) @classmethod def persist(cls,", "element_dims = [kdim for kdim in dataset.kdims if kdim not", "in samples] return data[data[dims[0].name].isin(items)] predicates = None for sample in", "if values is None: values = [ 
key for key", "condition = None for id in object: predicate = column", "is not None: bound = util.numpy_scalar_to_python(object.stop) predicates.append(column < bound) elif", "dataset.data.head(0).execute().dtypes[dimension.name] dimension_type = dtype @classmethod def sort(cls, dataset, by=[], reverse=False):", "histogram(cls, expr, bins, density=True, weights=None): bins = numpy.asarray(bins) bins =", "numpy.isscalar(values): raise ValueError(\"Cannot assign %s type as a Ibis table", "data if predicates is None else data.filter(predicates) @classmethod def aggregate(cls,", "group_type, **kwargs): # aggregate the necesary dimensions index_dims = [dataset.get_dimension(d,", "if rows.start: predicates += [data.hv_row_id__ >= rows.start] if rows.stop: predicates", "selection.aggregate( **{ x: function(new[x]).to_expr() for x in new.columns if x", "in new.columns} ) dropped = [x for x in values", "condition | predicate ) if condition is not None: predicates.append(condition)", "i, s in groups.iterrows() ] if issubclass(container_type, NdMapping): with item_check(False),", "length by counting the length of an empty query. 
return", "@classmethod @cached def nonzero(cls, dataset): # Make an empty query", "groups = dataset.data.groupby(group_by).aggregate().execute() # filter each group based on the", "dimension = dataset.get_dimension(dimension, strict=True) data = dataset.data[dimension.name] if not expanded:", "bins] binned = expr.bucket(bins).name('bucket') hist = numpy.zeros(len(bins)-1) hist_bins = binned.value_counts().sort_by('bucket').execute()", "( dataset.data[dataset.get_dimension(dim, strict=True).name] .distinct() .count() .compute() == 1 ) @classmethod", "applies(cls, obj): if not cls.loaded(): return False from ibis.expr.types import", "= dataset.get_dimension(dim).name column = dataset.data[alias] if isinstance(object, slice): if object.start", "return aggregation, dropped @classmethod @cached def mask(cls, dataset, mask, mask_value=numpy.nan):", "return data.drop([\"hv_row_id__\"]) @classmethod def unpack_scalar(cls, dataset, data): \"\"\" Given a", "# Workaround for dask issue #3392 bound = util.numpy_scalar_to_python(object.start) predicates.append(bound", "x in dataset.dimensions()[columns]] elif numpy.isscalar(columns): columns = [dataset.get_dimension(columns).name] else: columns", "else None]) return data, dict(kdims=keys, vdims=values), {} @classmethod def compute(cls,", "return data return data.execute().iat[0, 0] @classmethod def groupby(cls, dataset, dimensions,", "predicate return data if predicates is None else data.filter(predicates) @classmethod", "= new.aggregate( **{x: function(new[x]).to_expr() for x in new.columns} ) dropped", "dim): return ( dataset.data[dataset.get_dimension(dim, strict=True).name] .distinct() .count() .compute() == 1", "@cached def mask(cls, dataset, mask, mask_value=numpy.nan): raise NotImplementedError('Mask is not", "s in groups.iterrows() ] if issubclass(container_type, NdMapping): with item_check(False), sorted_context(False):", "\"iloc expressions are not supported for ibis version %s.\" %", "predicates = predicate else: 
predicates |= predicate return data if", "cls.indexed(dataset, selection) data = dataset.data if isinstance(selection_mask, numpy.ndarray): data =", "object in selection.items(): if isinstance(object, tuple): object = slice(*object) alias", "density: hist = hist/expr.count().execute() return hist, bins @classmethod @cached def", "for x in values if x not in data.columns] return", "= v if weights is not None: raise NotImplementedError(\"Weighted histograms", "d in index_dims] # execute a query against the table", "if d in dimensions] values = dataset.dimensions(\"value\", label=\"name\") new =", "p = data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v)) if predicate is None: predicate", "data.distinct() return data if keep_index or not compute else data.execute().values", "def is_rowid_zero_indexed(cls, data): try: from ibis.client import find_backends, validate_backends (backend,)", "histograms currently \" \"not implemented for IbisInterface.\") if density: hist", "binned.value_counts().sort_by('bucket').execute() for b, v in zip(hist_bins['bucket'], hist_bins['count']): if numpy.isnan(b): continue", "def nonzero(cls, dataset): # Make an empty query to see", "selection_mask = numpy.where(selection_mask)[0] data = data.filter( data[\"hv_row_id__\"].isin(list(map(int, selection_mask))) ).drop([\"hv_row_id__\"]) elif", "= hist/expr.count().execute() return hist, bins @classmethod @cached def shape(cls, dataset):", "return hasattr(ibis.expr.operations, \"RowID\") @classmethod def is_rowid_zero_indexed(cls, data): try: from ibis.client", "return data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return data.drop([\"hv_row_id__\"]) @classmethod def unpack_scalar(cls, dataset, data): \"\"\"", "selection_mask = cls.select_mask(dataset, selection) indexed = cls.indexed(dataset, selection) data =", "selection.items(): if isinstance(object, tuple): object = slice(*object) alias = dataset.get_dimension(dim).name", "dataset.data[alias] if 
isinstance(object, slice): if object.start is not None: #", "= data._find_backend() return type(backend).__module__ in cls.zero_indexed_backend_modules @classmethod def loaded(cls): return", "length of an empty query. return dataset.data[[]].count().execute() @classmethod @cached def", "dataset.data.groupby(group_by).aggregate().execute() # filter each group based on the predicate defined.", "predicates += [data.hv_row_id__ < rows.stop] return data.filter(predicates).drop([\"hv_row_id__\"]) else: if not", "a pseudo column for the row number but i think", "%s type as a Ibis table column, \" \"expecting either", "defined. data = [ ( tuple(s.values.tolist()), group_type( dataset.data.filter( [dataset.data[k] ==", "if numpy.isnan(b): continue hist[int(b)] = v if weights is not", "[x.name for x in dataset.dimensions()[columns]] elif numpy.isscalar(columns): columns = [dataset.get_dimension(columns).name]", "or data[[]].count().execute() != 1: return data return data.execute().iat[0, 0] @classmethod", "data.columns[ndim : ((ndim + nvdim) if nvdim else None)] if", "and not selection_mask): data = data.filter(selection_mask) if indexed and data.count().execute()", "in values if x not in data.columns] return aggregation, dropped", "validate = pandas.PandasInterface.validate reindex = pandas.PandasInterface.reindex @classmethod def _index_ibis_table(cls, data):", "selection_mask))) ).drop([\"hv_row_id__\"]) elif selection_mask is not None and not (isinstance(selection_mask,", "columns = [dataset.get_dimension(columns).name] else: columns = [dataset.get_dimension(d).name for d in", "= binned.value_counts().sort_by('bucket').execute() for b, v in zip(hist_bins['bucket'], hist_bins['count']): if numpy.isnan(b):", "if keep_index or not compute else data.execute().values @classmethod def histogram(cls,", "backend = data._find_backend() return type(backend).__module__ in cls.zero_indexed_backend_modules @classmethod def loaded(cls):", ".execute() .iloc[0, 0] ) if isinstance(rows, 
slice): # We should", "if condition is None else condition | predicate ) if", "from ..element import Element from ..ndmapping import NdMapping, item_check, sorted_context", "Iterable except ImportError: from collections import Iterable from .. import", "a query against the table to find the unique groups.", ":] ) @classmethod @cached def values( cls, dataset, dimension, expanded=True,", "== rows)[columns] .head(1) .execute() .iloc[0, 0] ) if isinstance(rows, slice):", "numpy.max: ibis.expr.operations.Max, numpy.nanmax: ibis.expr.operations.Max, numpy.mean: ibis.expr.operations.Mean, numpy.nanmean: ibis.expr.operations.Mean, numpy.std: ibis.expr.operations.StandardDev,", "[] and values is None: values = list(data.columns[: nvdim if", "dimension.name not in data.columns: if not isinstance(values, ibis.Expr) and not", "@classmethod def assign(cls, dataset, new_data): return dataset.data.mutate(**new_data) @classmethod def add_dimension(cls,", "def sort(cls, dataset, by=[], reverse=False): return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for", "@classmethod def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): # aggregate", "dataset, selection_mask=None, **selection): if selection_mask is None: selection_mask = cls.select_mask(dataset,", "if any(x is not None for x in (rows.start, rows.stop,", "in s.to_dict().items()] ), **group_kwargs ), ) for i, s in", "data.filter(predicates).drop([\"hv_row_id__\"]) else: if not isinstance(rows, Iterable): rows = [rows] return", "for d in columns] data = cls._index_ibis_table(dataset.data[columns]) if scalar: return", "dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :] ) @classmethod @cached def values( cls, dataset,", "a simple scalar. 
\"\"\" if len(data.columns) > 1 or data[[]].count().execute()", "that is still awaiting # a pr on ibis if", "return container_type(data, kdims=index_dims) else: return container_type(data) @classmethod def assign(cls, dataset,", "@classmethod def select_mask(cls, dataset, selection): import ibis predicates = []", "@cached def length(self, dataset): # Get the length by counting", "group_by = [d.name for d in index_dims] # execute a", "container_type(data) @classmethod def assign(cls, dataset, new_data): return dataset.data.mutate(**new_data) @classmethod def", "returned. return bool(len(dataset.data[[]].head(1).execute())) @classmethod @cached def range(cls, dataset, dimension): dimension", "params[\"vdims\"] if isinstance(index.bounds[1], int): ndim = min([index.bounds[1], len(index.default)]) else: ndim", "[dataset.get_dimension(d, strict=True) for d in dimensions] element_dims = [kdim for", "== numpy.dtype(\"bool\"): selection_mask = numpy.where(selection_mask)[0] data = data.filter( data[\"hv_row_id__\"].isin(list(map(int, selection_mask)))", "**kwargs): # aggregate the necesary dimensions index_dims = [dataset.get_dimension(d, strict=True)", "and values is None: values = [c for c in", "for c in data.columns if c not in values][:ndim] elif", "= numpy.zeros(len(bins)-1) hist_bins = binned.value_counts().sort_by('bucket').execute() for b, v in zip(hist_bins['bucket'],", "= [] if rows.start: predicates += [data.hv_row_id__ >= rows.start] if", "dask issue #3392 bound = util.numpy_scalar_to_python(object.start) predicates.append(bound <= column) if", "selection_mask): data = data.filter(selection_mask) if indexed and data.count().execute() == 1", "columns.bounds[1] if isinstance(columns.bounds[1], int) else None if keys and values", "issubclass(container_type, NdMapping): with item_check(False), sorted_context(False): return container_type(data, kdims=index_dims) else: return", "if c not in keys] elif values and keys is", "find_backends, validate_backends (backend,) 
= validate_backends(list(find_backends(data))) except Exception: backend = data._find_backend()", "bound) elif isinstance(object, (set, list)): # rowid conditions condition =", "int) else None if keys and values is None: values", "data): \"\"\" Given a dataset object and data in the", "data if cls.is_rowid_zero_indexed(data): return data.mutate(hv_row_id__=data.rowid()) else: return data.mutate(hv_row_id__=data.rowid() - 1)", "isinstance(index.bounds[1], int): ndim = min([index.bounds[1], len(index.default)]) else: ndim = None", "dataset.data if all(util.isscalar(s) or len(s) == 1 for s in", "bound = util.numpy_scalar_to_python(object.start) predicates.append(bound <= column) if object.stop is not", "not None: # Workaround for dask issue #3392 bound =", "ibis.expr.operations.Max, numpy.nanmax: ibis.expr.operations.Max, numpy.mean: ibis.expr.operations.Mean, numpy.nanmean: ibis.expr.operations.Mean, numpy.std: ibis.expr.operations.StandardDev, numpy.nanstd:", "use a pseudo column for the row number but i", "= util.numpy_scalar_to_python(object.start) predicates.append(bound <= column) if object.stop is not None:", "if keys and values is None: values = [c for", "in groups.iterrows() ] if issubclass(container_type, NdMapping): with item_check(False), sorted_context(False): return", "return data[dataset.vdims[0].name].execute().iloc[0] return data @classmethod def select_mask(cls, dataset, selection): import", "dimension, dim_pos, values, vdim): import ibis data = dataset.data if", "), **group_kwargs ), ) for i, s in groups.iterrows() ]", "length(self, dataset): # Get the length by counting the length", "= {} if group_type != \"raw\" and issubclass(group_type, Element): group_kwargs", "return cls.length(dataset), len(dataset.data.columns) @classmethod @cached def dtype(cls, dataset, dimension): dimension", "None: raise NotImplementedError(\"Weighted histograms currently \" \"not implemented for IbisInterface.\")", "return dataset.data.mutate( **{v.name: dataset.data[k] 
for k, v in dimensions.items()} )", "== 1 for s in samples): items = [s[0] if", "dataset.data if isinstance(selection_mask, numpy.ndarray): data = cls._index_ibis_table(data) if selection_mask.dtype ==", "= columns.bounds[1] if isinstance(columns.bounds[1], int) else None if keys and", "hist = hist/expr.count().execute() return hist, bins @classmethod @cached def shape(cls,", ") if isinstance(rows, slice): # We should use a pseudo", "v in zip(hist_bins['bucket'], hist_bins['count']): if numpy.isnan(b): continue hist[int(b)] = v", "by counting the length of an empty query. return dataset.data[[]].count().execute()", "ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr() for x in new.columns if x", "cls.is_rowid_zero_indexed(data): return data.mutate(hv_row_id__=data.rowid()) else: return data.mutate(hv_row_id__=data.rowid() - 1) @classmethod def", "not implemented for IbisInterface.') @classmethod @cached def dframe(cls, dataset, dimensions):", "cls.dtype(dataset, dimension).kind in 'SUO': return None, None if dimension.nodata is", "zip(hist_bins['bucket'], hist_bins['count']): if numpy.isnan(b): continue hist[int(b)] = v if weights", "= selection.aggregate( **{ x: ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr() for x", "if len(dimensions): selection = new.groupby(columns) if function is numpy.count_nonzero: aggregation", "= min([index.bounds[1], len(index.default)]) else: ndim = None nvdim = columns.bounds[1]", "list)): # rowid conditions condition = None for id in", "bins = [int(v) if bins.dtype.kind in 'iu' else float(v) for", "and keys is None: keys = [c for c in", "data[columns + values] function = { numpy.min: ibis.expr.operations.Min, numpy.nanmin: ibis.expr.operations.Min,", "\"ibis\" in sys.modules @classmethod def applies(cls, obj): if not cls.loaded():", "= dataset.data if all(util.isscalar(s) or len(s) == 1 for s", "type as a Ibis table column, \" \"expecting either ibis.Expr", "ImportError: from collections import Iterable from 
.. import util from", "ibis.expr.operations.Sum, numpy.var: ibis.expr.operations.Variance, numpy.nanvar: ibis.expr.operations.Variance, len: ibis.expr.operations.Count, }.get(function, function) if", "validate_backends(list(find_backends(data))) except Exception: backend = data._find_backend() return type(backend).__module__ in cls.zero_indexed_backend_modules", "keys] elif values and keys is None: keys = [c", "def values( cls, dataset, dimension, expanded=True, flat=True, compute=True, keep_index=False, ):", "None: values = list(data.columns[: nvdim if nvdim else None]) return", "] # the rowid is needed until ibis updates versions", "def unpack_scalar(cls, dataset, data): \"\"\" Given a dataset object and", "len(index.default)]) else: ndim = None nvdim = columns.bounds[1] if isinstance(columns.bounds[1],", "simple scalar. \"\"\" if len(data.columns) > 1 or data[[]].count().execute() !=", "predicates.append(object(column)) elif isinstance(object, ibis.Expr): predicates.append(object) else: predicates.append(column == object) return", "c in data.columns if c not in values][:ndim] elif keys", "IbisInterface.\") if density: hist = hist/expr.count().execute() return hist, bins @classmethod", "def select(cls, dataset, selection_mask=None, **selection): if selection_mask is None: selection_mask", "item_check(False), sorted_context(False): return container_type(data, kdims=index_dims) else: return container_type(data) @classmethod def", "p else: predicate &= p if predicates is None: predicates", "columns } ) else: aggregation = new.aggregate( **{x: function(new[x]).to_expr() for", "== object) return predicates @classmethod def sample(cls, dataset, samples=[]): import", "condition is not None: predicates.append(condition) elif callable(object): predicates.append(object(column)) elif isinstance(object,", "if rows.stop: predicates += [data.hv_row_id__ < rows.stop] return data.filter(predicates).drop([\"hv_row_id__\"]) else:", "dataset.dataset group_by = [d.name for d in 
index_dims] # execute", "the appropriate format for the interface, return a simple scalar.", "in 'SUO': return None, None if dimension.nodata is not None:", "if not cls.loaded(): return False from ibis.expr.types import Expr return", "= () datatype = \"ibis\" default_partitions = 100 zero_indexed_backend_modules =", "column.max()]).execute().values[0, :] ) @classmethod @cached def values( cls, dataset, dimension,", "data = data.distinct() return data if keep_index or not compute", "= eltype.param.objects() index = params[\"kdims\"] columns = params[\"vdims\"] if isinstance(index.bounds[1],", "for d in index_dims] # execute a query against the", "dataset.kdims if d in dimensions] values = dataset.dimensions(\"value\", label=\"name\") new", "eltype.param.objects() index = params[\"kdims\"] columns = params[\"vdims\"] if isinstance(index.bounds[1], int):", "\"raw\" and issubclass(group_type, Element): group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims) group_kwargs.update(kwargs) group_kwargs[\"dataset\"]", "def aggregate(cls, dataset, dimensions, function, **kwargs): import ibis.expr.operations data =", "rows = [rows] return data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"]) return data.drop([\"hv_row_id__\"]) @classmethod def unpack_scalar(cls,", "< rows.stop] return data.filter(predicates).drop([\"hv_row_id__\"]) else: if not isinstance(rows, Iterable): rows", "tuple): object = slice(*object) alias = dataset.get_dimension(dim).name column = dataset.data[alias]", "in values][:ndim] elif keys is None: keys = list(data.columns[:ndim]) if", "dataset, index): rows, columns = index scalar = all(map(util.isscalar, index))", "strict=True) data = dataset.data[dimension.name] if not expanded: data = data.distinct()", "if all(util.isscalar(s) or len(s) == 1 for s in samples):", "Element from ..ndmapping import NdMapping, item_check, sorted_context from .interface import", "# filter each group based on the predicate defined. 
data", ".util import cached class IbisInterface(Interface): types = () datatype =", "expanded: data = data.distinct() return data if keep_index or not", "except ImportError: from collections import Iterable from .. import util", "def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): # aggregate the", "def length(self, dataset): # Get the length by counting the", "= data.filter( data[\"hv_row_id__\"].isin(list(map(int, selection_mask))) ).drop([\"hv_row_id__\"]) elif selection_mask is not None", "versions @classmethod def has_rowid(cls): import ibis.expr.operations return hasattr(ibis.expr.operations, \"RowID\") @classmethod", "dataset.dimensions(\"value\", label=\"name\") new = data[columns + values] function = {", "for x in dataset.dimensions()[columns]] elif numpy.isscalar(columns): columns = [dataset.get_dimension(columns).name] else:", "@classmethod def has_rowid(cls): import ibis.expr.operations return hasattr(ibis.expr.operations, \"RowID\") @classmethod def", "( tuple(s.values.tolist()), group_type( dataset.data.filter( [dataset.data[k] == v for k, v", "@cached def range(cls, dataset, dimension): dimension = dataset.get_dimension(dimension, strict=True) if", "dataset.kdims if kdim not in index_dims] group_kwargs = {} if", "bins @classmethod @cached def shape(cls, dataset): return cls.length(dataset), len(dataset.data.columns) @classmethod", "bound = util.numpy_scalar_to_python(object.stop) predicates.append(column < bound) elif isinstance(object, (set, list)):", "pandas.PandasInterface.validate reindex = pandas.PandasInterface.reindex @classmethod def _index_ibis_table(cls, data): import ibis", "bins, density=True, weights=None): bins = numpy.asarray(bins) bins = [int(v) if", "..element import Element from ..ndmapping import NdMapping, item_check, sorted_context from", "None for sample in samples: if util.isscalar(sample): sample = [sample]", "hist, bins @classmethod @cached def shape(cls, dataset): return cls.length(dataset), 
len(dataset.data.columns)", "default_partitions = 100 zero_indexed_backend_modules = [ 'ibis.backends.omniscidb.client', ] # the", "# the rowid is needed until ibis updates versions @classmethod", "function(new[x]).to_expr() for x in new.columns} ) dropped = [x for", "return data.execute().iat[0, 0] @classmethod def groupby(cls, dataset, dimensions, container_type, group_type,", "NdMapping): with item_check(False), sorted_context(False): return container_type(data, kdims=index_dims) else: return container_type(data)", "import find_backends, validate_backends (backend,) = validate_backends(list(find_backends(data))) except Exception: backend =", "in samples: if util.isscalar(sample): sample = [sample] if not sample:", "ibis dims = dataset.dimensions() data = dataset.data if all(util.isscalar(s) or", "data = data.filter(selection_mask) if indexed and data.count().execute() == 1 and", "@classmethod @cached def dtype(cls, dataset, dimension): dimension = dataset.get_dimension(dimension) return", "dims = dataset.dimensions() data = dataset.data if all(util.isscalar(s) or len(s)", "a pr on ibis if any(x is not None for", "== id condition = ( predicate if condition is None", "} ) else: aggregation = selection.aggregate( **{ x: function(new[x]).to_expr() for", "columns = [x.name for x in dataset.dimensions()[columns]] elif numpy.isscalar(columns): columns", "dataset, by=[], reverse=False): return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for x in", ") else: aggregation = selection.aggregate( **{ x: function(new[x]).to_expr() for x", "ibis.expr.operations.Max, numpy.mean: ibis.expr.operations.Mean, numpy.nanmean: ibis.expr.operations.Mean, numpy.std: ibis.expr.operations.StandardDev, numpy.nanstd: ibis.expr.operations.StandardDev, numpy.sum:", "dataset.get_dimension(dim).name column = dataset.data[alias] if isinstance(object, slice): if object.start is", "data.columns] return aggregation, dropped @classmethod @cached def mask(cls, dataset, mask,", 
"'SUO': return None, None if dimension.nodata is not None: return", "We should use a pseudo column for the row number", "if dimension.nodata is not None: return Interface.range(dataset, dimension) column =", "d in dimensions] values = dataset.dimensions(\"value\", label=\"name\") new = data[columns", "dataset.data[dataset.get_dimension(dim, strict=True).name] .distinct() .count() .compute() == 1 ) @classmethod def", "scalar = all(map(util.isscalar, index)) if isinstance(columns, slice): columns = [x.name", "index_dims] group_kwargs = {} if group_type != \"raw\" and issubclass(group_type,", "dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for x in by]) @classmethod def redim(cls,", "index_dims = [dataset.get_dimension(d, strict=True) for d in dimensions] element_dims =", "= [kdim for kdim in dataset.kdims if kdim not in", "is not None: # Workaround for dask issue #3392 bound", "= numpy.asarray(bins) bins = [int(v) if bins.dtype.kind in 'iu' else", "column) if object.stop is not None: bound = util.numpy_scalar_to_python(object.stop) predicates.append(column", "counting the length of an empty query. 
return dataset.data[[]].count().execute() @classmethod", "\"expecting either ibis.Expr or scalar.\" % type(values).__name__) data = data.mutate(**{dimension.name:", "= slice(*object) alias = dataset.get_dimension(dim).name column = dataset.data[alias] if isinstance(object,", "in zip(hist_bins['bucket'], hist_bins['count']): if numpy.isnan(b): continue hist[int(b)] = v if", "predicate = None for i, v in enumerate(sample): p =", "return False from ibis.expr.types import Expr return isinstance(obj, Expr) @classmethod", "numpy.sum: ibis.expr.operations.Sum, numpy.nansum: ibis.expr.operations.Sum, numpy.var: ibis.expr.operations.Variance, numpy.nanvar: ibis.expr.operations.Variance, len: ibis.expr.operations.Count,", "scalar: return ( data.filter(data.hv_row_id__ == rows)[columns] .head(1) .execute() .iloc[0, 0]", "for key in data.columns[ndim : ((ndim + nvdim) if nvdim", "data, keys, values): params = eltype.param.objects() index = params[\"kdims\"] columns", "tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :] ) @classmethod @cached def values( cls,", "else: predicates |= predicate return data if predicates is None", "kdims=element_dims) group_kwargs.update(kwargs) group_kwargs[\"dataset\"] = dataset.dataset group_by = [d.name for d", "#3392 bound = util.numpy_scalar_to_python(object.start) predicates.append(bound <= column) if object.stop is", "keys is None: keys = [c for c in data.columns", "= dataset.data if dimension.name not in data.columns: if not isinstance(values,", "if selection_mask is None: selection_mask = cls.select_mask(dataset, selection) indexed =", "table to find the unique groups. 
groups = dataset.data.groupby(group_by).aggregate().execute() #", "numpy.zeros(len(bins)-1) hist_bins = binned.value_counts().sort_by('bucket').execute() for b, v in zip(hist_bins['bucket'], hist_bins['count']):", "1 or data[[]].count().execute() != 1: return data return data.execute().iat[0, 0]", "keys ] elif keys == [] and values is None:", "ibis.expr.operations return hasattr(ibis.expr.operations, \"RowID\") @classmethod def is_rowid_zero_indexed(cls, data): try: from", "conditions condition = None for id in object: predicate =", "( predicate if condition is None else condition | predicate", "any(x is not None for x in (rows.start, rows.stop, rows.step)):", "dataset.data if dimension.name not in data.columns: if not isinstance(values, ibis.Expr)", "values is None: values = [c for c in data.columns", "dataset, new_data): return dataset.data.mutate(**new_data) @classmethod def add_dimension(cls, dataset, dimension, dim_pos,", "util.isscalar(sample): sample = [sample] if not sample: continue predicate =", "== v for k, v in s.to_dict().items()] ), **group_kwargs ),", "not in columns } ) else: aggregation = new.aggregate( **{x:", "= None for id in object: predicate = column ==", "ibis.expr.operations.Min, numpy.max: ibis.expr.operations.Max, numpy.nanmax: ibis.expr.operations.Max, numpy.mean: ibis.expr.operations.Mean, numpy.nanmean: ibis.expr.operations.Mean, numpy.std:", "slice): if object.start is not None: # Workaround for dask", "+ values] function = { numpy.min: ibis.expr.operations.Min, numpy.nanmin: ibis.expr.operations.Min, numpy.max:", "rows)[columns] .head(1) .execute() .iloc[0, 0] ) if isinstance(rows, slice): #", "if cls.dtype(dataset, dimension).kind in 'SUO': return None, None if dimension.nodata", "util from ..element import Element from ..ndmapping import NdMapping, item_check,", "in the appropriate format for the interface, return a simple", "if dimension.name not in data.columns: if not isinstance(values, ibis.Expr) and", "if weights is not None: 
raise NotImplementedError(\"Weighted histograms currently \"", "= [c for c in data.columns if c not in", "rowid conditions condition = None for id in object: predicate", "in index_dims] # execute a query against the table to", "import cached class IbisInterface(Interface): types = () datatype = \"ibis\"", "column = dataset.data[alias] if isinstance(object, slice): if object.start is not", "function) if len(dimensions): selection = new.groupby(columns) if function is numpy.count_nonzero:", "predicate defined. data = [ ( tuple(s.values.tolist()), group_type( dataset.data.filter( [dataset.data[k]", "aggregation, dropped @classmethod @cached def mask(cls, dataset, mask, mask_value=numpy.nan): raise", "dataset.data[[]].count().execute() @classmethod @cached def nonzero(cls, dataset): # Make an empty", "but i think that is still awaiting # a pr", "think that is still awaiting # a pr on ibis", "groups. groups = dataset.data.groupby(group_by).aggregate().execute() # filter each group based on", "[dataset.get_dimension(d).name for d in columns] data = cls._index_ibis_table(dataset.data[columns]) if scalar:", "dataset.data columns = [d.name for d in dataset.kdims if d", "raise ValueError( \"iloc expressions are not supported for ibis version", "selection): import ibis predicates = [] for dim, object in", "Expr) @classmethod def init(cls, eltype, data, keys, values): params =", "is_rowid_zero_indexed(cls, data): try: from ibis.client import find_backends, validate_backends (backend,) =", "values is None: values = list(data.columns[: nvdim if nvdim else", "dataset.clone(dataset.data.execute()) @classmethod def persist(cls, dataset): return cls.compute(dataset) @classmethod @cached def", "dimension).kind in 'SUO': return None, None if dimension.nodata is not", "new.groupby(columns) if function is numpy.count_nonzero: aggregation = selection.aggregate( **{ x:", "has_rowid(cls): import ibis.expr.operations return hasattr(ibis.expr.operations, \"RowID\") @classmethod def 
is_rowid_zero_indexed(cls, data):", "numpy.nanstd: ibis.expr.operations.StandardDev, numpy.sum: ibis.expr.operations.Sum, numpy.nansum: ibis.expr.operations.Sum, numpy.var: ibis.expr.operations.Variance, numpy.nanvar: ibis.expr.operations.Variance,", "@classmethod def sample(cls, dataset, samples=[]): import ibis dims = dataset.dimensions()", "def range(cls, dataset, dimension): dimension = dataset.get_dimension(dimension, strict=True) if cls.dtype(dataset,", "if nvdim else None)] if key not in keys ]", "dimensions.items()} ) validate = pandas.PandasInterface.validate reindex = pandas.PandasInterface.reindex @classmethod def", "predicates.append(object) else: predicates.append(column == object) return predicates @classmethod def sample(cls,", "is None: values = [ key for key in data.columns[ndim", "data = [ ( tuple(s.values.tolist()), group_type( dataset.data.filter( [dataset.data[k] == v", "in columns } ) else: aggregation = selection.aggregate( **{ x:", "expressions are not supported for ibis version %s.\" % ibis.__version__", "for s in samples] return data[data[dims[0].name].isin(items)] predicates = None for", "ibis updates versions @classmethod def has_rowid(cls): import ibis.expr.operations return hasattr(ibis.expr.operations,", "= dataset.data[dimension.name] return tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :] ) @classmethod @cached", "the row number but i think that is still awaiting", "on the predicate defined. 
data = [ ( tuple(s.values.tolist()), group_type(", "1 and len(dataset.vdims) == 1: return data[dataset.vdims[0].name].execute().iloc[0] return data @classmethod", "= [dataset.get_dimension(d, strict=True) for d in dimensions] element_dims = [kdim", "\" \"expecting either ibis.Expr or scalar.\" % type(values).__name__) data =", "x: ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr() for x in new.columns if", "<gh_stars>1-10 import sys import numpy try: from collections.abc import Iterable", "return Interface.range(dataset, dimension) column = dataset.data[dimension.name] return tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0,", "raise NotImplementedError(\"Weighted histograms currently \" \"not implemented for IbisInterface.\") if", ".. import util from ..element import Element from ..ndmapping import", "!= 1: return data return data.execute().iat[0, 0] @classmethod def groupby(cls,", "return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for x in by]) @classmethod def", "list(data.columns[:ndim]) if values is None: values = [ key for", "return predicates @classmethod def sample(cls, dataset, samples=[]): import ibis dims", "(rows.start, rows.stop, rows.step)): predicates = [] if rows.start: predicates +=", "in 'iu' else float(v) for v in bins] binned =", "sys import numpy try: from collections.abc import Iterable except ImportError:", "select(cls, dataset, selection_mask=None, **selection): if selection_mask is None: selection_mask =", "types = () datatype = \"ibis\" default_partitions = 100 zero_indexed_backend_modules", "None else condition | predicate ) if condition is not", "if kdim not in index_dims] group_kwargs = {} if group_type", "return a simple scalar. 
\"\"\" if len(data.columns) > 1 or", "hist/expr.count().execute() return hist, bins @classmethod @cached def shape(cls, dataset): return", "not in columns } ) else: aggregation = selection.aggregate( **{", "from ibis.client import find_backends, validate_backends (backend,) = validate_backends(list(find_backends(data))) except Exception:", ".head(1) .execute() .iloc[0, 0] ) if isinstance(rows, slice): # We", "add_dimension(cls, dataset, dimension, dim_pos, values, vdim): import ibis data =", "numpy.count_nonzero: aggregation = selection.aggregate( **{ x: ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr()", "collections.abc import Iterable except ImportError: from collections import Iterable from", "data._find_backend() return type(backend).__module__ in cls.zero_indexed_backend_modules @classmethod def loaded(cls): return \"ibis\"", "a row is returned. return bool(len(dataset.data[[]].head(1).execute())) @classmethod @cached def range(cls,", "numpy.var: ibis.expr.operations.Variance, numpy.nanvar: ibis.expr.operations.Variance, len: ibis.expr.operations.Count, }.get(function, function) if len(dimensions):", "is None: predicates = predicate else: predicates |= predicate return", "data = cls._index_ibis_table(data) if selection_mask.dtype == numpy.dtype(\"bool\"): selection_mask = numpy.where(selection_mask)[0]", "cls.select_mask(dataset, selection) indexed = cls.indexed(dataset, selection) data = dataset.data if", "s.to_dict().items()] ), **group_kwargs ), ) for i, s in groups.iterrows()", "@classmethod @cached def length(self, dataset): # Get the length by", "ibis if any(x is not None for x in (rows.start,", "min([index.bounds[1], len(index.default)]) else: ndim = None nvdim = columns.bounds[1] if", "kdim in dataset.kdims if kdim not in index_dims] group_kwargs =", "d in columns] data = cls._index_ibis_table(dataset.data[columns]) if scalar: return (", "data = dataset.data if dimension.name not in data.columns: if not", "from ibis.expr.types import 
Expr return isinstance(obj, Expr) @classmethod def init(cls,", "def _index_ibis_table(cls, data): import ibis if not cls.has_rowid(): raise ValueError(", "p if predicates is None: predicates = predicate else: predicates", "nvdim) if nvdim else None)] if key not in keys", "} ) else: aggregation = new.aggregate( **{x: function(new[x]).to_expr() for x", "+= [data.hv_row_id__ < rows.stop] return data.filter(predicates).drop([\"hv_row_id__\"]) else: if not isinstance(rows,", "return tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :] ) @classmethod @cached def values(", "@cached def isscalar(cls, dataset, dim): return ( dataset.data[dataset.get_dimension(dim, strict=True).name] .distinct()", "expanded=True, flat=True, compute=True, keep_index=False, ): dimension = dataset.get_dimension(dimension, strict=True) data", "= None for i, v in enumerate(sample): p = data[dims[i].name]", "for the row number but i think that is still", "if isinstance(rows, slice): # We should use a pseudo column", "keep_index=False, ): dimension = dataset.get_dimension(dimension, strict=True) data = dataset.data[dimension.name] if", "an empty query to see if a row is returned.", "if issubclass(container_type, NdMapping): with item_check(False), sorted_context(False): return container_type(data, kdims=index_dims) else:", "import ibis.expr.operations data = dataset.data columns = [d.name for d", "row is returned. 
return bool(len(dataset.data[[]].head(1).execute())) @classmethod @cached def range(cls, dataset,", "elif values and keys is None: keys = [c for", "keys = [c for c in data.columns if c not", "predicates += [data.hv_row_id__ >= rows.start] if rows.stop: predicates += [data.hv_row_id__", ") @classmethod def select(cls, dataset, selection_mask=None, **selection): if selection_mask is", "issue #3392 bound = util.numpy_scalar_to_python(object.start) predicates.append(bound <= column) if object.stop", "strict=True) for d in dimensions] element_dims = [kdim for kdim", "if isinstance(object, tuple): object = slice(*object) alias = dataset.get_dimension(dim).name column", "== ibis.literal(util.numpy_scalar_to_python(v)) if predicate is None: predicate = p else:", "return ( data.filter(data.hv_row_id__ == rows)[columns] .head(1) .execute() .iloc[0, 0] )", "if cls.is_rowid_zero_indexed(data): return data.mutate(hv_row_id__=data.rowid()) else: return data.mutate(hv_row_id__=data.rowid() - 1) @classmethod", "predicates |= predicate return data if predicates is None else", "x not in columns } ) else: aggregation = new.aggregate(", "values] function = { numpy.min: ibis.expr.operations.Min, numpy.nanmin: ibis.expr.operations.Min, numpy.max: ibis.expr.operations.Max,", "mask_value=numpy.nan): raise NotImplementedError('Mask is not implemented for IbisInterface.') @classmethod @cached", "for IbisInterface.\") if density: hist = hist/expr.count().execute() return hist, bins", "|= predicate return data if predicates is None else data.filter(predicates)", "isinstance(columns.bounds[1], int) else None if keys and values is None:", "= all(map(util.isscalar, index)) if isinstance(columns, slice): columns = [x.name for", "awaiting # a pr on ibis if any(x is not", "values): params = eltype.param.objects() index = params[\"kdims\"] columns = params[\"vdims\"]", "dataset, dimension): dimension = dataset.get_dimension(dimension, strict=True) if cls.dtype(dataset, dimension).kind in", 
"condition = ( predicate if condition is None else condition", "alias = dataset.get_dimension(dim).name column = dataset.data[alias] if isinstance(object, slice): if", "keys == [] and values is None: values = list(data.columns[:", "Element): group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims) group_kwargs.update(kwargs) group_kwargs[\"dataset\"] = dataset.dataset group_by", "( data.filter(data.hv_row_id__ == rows)[columns] .head(1) .execute() .iloc[0, 0] ) if", "query. return dataset.data[[]].count().execute() @classmethod @cached def nonzero(cls, dataset): # Make", "item_check, sorted_context from .interface import Interface from . import pandas", "cls.loaded(): return False from ibis.expr.types import Expr return isinstance(obj, Expr)", "vdim): import ibis data = dataset.data if dimension.name not in", "return data @classmethod def select_mask(cls, dataset, selection): import ibis predicates", "= data.filter(selection_mask) if indexed and data.count().execute() == 1 and len(dataset.vdims)", ") @classmethod @cached def values( cls, dataset, dimension, expanded=True, flat=True,", "updates versions @classmethod def has_rowid(cls): import ibis.expr.operations return hasattr(ibis.expr.operations, \"RowID\")", "None: keys = [c for c in data.columns if c", "if \"hv_row_id__\" in data.columns: return data if cls.is_rowid_zero_indexed(data): return data.mutate(hv_row_id__=data.rowid())", "dimension = dataset.get_dimension(dimension, strict=True) if cls.dtype(dataset, dimension).kind in 'SUO': return", "return data[data[dims[0].name].isin(items)] predicates = None for sample in samples: if", "dtype(cls, dataset, dimension): dimension = dataset.get_dimension(dimension) return dataset.data.head(0).execute().dtypes[dimension.name] dimension_type =", "else None if keys and values is None: values =", "= params[\"kdims\"] columns = params[\"vdims\"] if isinstance(index.bounds[1], int): ndim =", "based on the predicate defined. 
data = [ ( tuple(s.values.tolist()),", "cls._index_ibis_table(data) if selection_mask.dtype == numpy.dtype(\"bool\"): selection_mask = numpy.where(selection_mask)[0] data =", "= \"ibis\" default_partitions = 100 zero_indexed_backend_modules = [ 'ibis.backends.omniscidb.client', ]", "[data.hv_row_id__ < rows.stop] return data.filter(predicates).drop([\"hv_row_id__\"]) else: if not isinstance(rows, Iterable):", "if util.isscalar(sample): sample = [sample] if not sample: continue predicate", "ibis.Expr): predicates.append(object) else: predicates.append(column == object) return predicates @classmethod def", "from .interface import Interface from . import pandas from .util", "from ..ndmapping import NdMapping, item_check, sorted_context from .interface import Interface", "numpy.std: ibis.expr.operations.StandardDev, numpy.nanstd: ibis.expr.operations.StandardDev, numpy.sum: ibis.expr.operations.Sum, numpy.nansum: ibis.expr.operations.Sum, numpy.var: ibis.expr.operations.Variance,", "dataset.data.mutate( **{v.name: dataset.data[k] for k, v in dimensions.items()} ) validate", "v in s.to_dict().items()] ), **group_kwargs ), ) for i, s", "**selection): if selection_mask is None: selection_mask = cls.select_mask(dataset, selection) indexed", "= dataset.get_dimension(dimension) return dataset.data.head(0).execute().dtypes[dimension.name] dimension_type = dtype @classmethod def sort(cls,", "data = dataset.data columns = [d.name for d in dataset.kdims", "dimension_type = dtype @classmethod def sort(cls, dataset, by=[], reverse=False): return", "if density: hist = hist/expr.count().execute() return hist, bins @classmethod @cached", "else s for s in samples] return data[data[dims[0].name].isin(items)] predicates =", "if key not in keys ] elif keys == []", "> 1 or data[[]].count().execute() != 1: return data return data.execute().iat[0,", "key not in keys ] elif keys == [] and", "aggregation = new.aggregate( **{x: function(new[x]).to_expr() for x in new.columns} )", "is not None: 
return Interface.range(dataset, dimension) column = dataset.data[dimension.name] return", "elif isinstance(object, (set, list)): # rowid conditions condition = None", "supported for ibis version %s.\" % ibis.__version__ ) if \"hv_row_id__\"", "len(dataset.vdims) == 1: return data[dataset.vdims[0].name].execute().iloc[0] return data @classmethod def select_mask(cls,", "predicate ) if condition is not None: predicates.append(condition) elif callable(object):", "**{ x: function(new[x]).to_expr() for x in new.columns if x not", "return data, dict(kdims=keys, vdims=values), {} @classmethod def compute(cls, dataset): return", "numpy.min: ibis.expr.operations.Min, numpy.nanmin: ibis.expr.operations.Min, numpy.max: ibis.expr.operations.Max, numpy.nanmax: ibis.expr.operations.Max, numpy.mean: ibis.expr.operations.Mean,", "strict=True).name] .distinct() .count() .compute() == 1 ) @classmethod def select(cls,", "all(map(util.isscalar, index)) if isinstance(columns, slice): columns = [x.name for x", "+ nvdim) if nvdim else None)] if key not in", "# a pr on ibis if any(x is not None", "flat=True, compute=True, keep_index=False, ): dimension = dataset.get_dimension(dimension, strict=True) data =", "selection_mask is not None and not (isinstance(selection_mask, list) and not", "@classmethod def init(cls, eltype, data, keys, values): params = eltype.param.objects()", "numpy.nanmean: ibis.expr.operations.Mean, numpy.std: ibis.expr.operations.StandardDev, numpy.nanstd: ibis.expr.operations.StandardDev, numpy.sum: ibis.expr.operations.Sum, numpy.nansum: ibis.expr.operations.Sum,", "in data.columns if c not in keys] elif values and", "{} if group_type != \"raw\" and issubclass(group_type, Element): group_kwargs =", "nvdim else None]) return data, dict(kdims=keys, vdims=values), {} @classmethod def", "# execute a query against the table to find the", "in dimensions] values = dataset.dimensions(\"value\", label=\"name\") new = data[columns +", "if a row is returned. 
return bool(len(dataset.data[[]].head(1).execute())) @classmethod @cached def", "numpy.isnan(b): continue hist[int(b)] = v if weights is not None:", "x not in columns } ) else: aggregation = selection.aggregate(", "sample: continue predicate = None for i, v in enumerate(sample):", "Workaround for dask issue #3392 bound = util.numpy_scalar_to_python(object.start) predicates.append(bound <=", "either ibis.Expr or scalar.\" % type(values).__name__) data = data.mutate(**{dimension.name: values})", "= cls.select_mask(dataset, selection) indexed = cls.indexed(dataset, selection) data = dataset.data", ".iloc[0, 0] ) if isinstance(rows, slice): # We should use", "\"not implemented for IbisInterface.\") if density: hist = hist/expr.count().execute() return", "'ibis.backends.omniscidb.client', ] # the rowid is needed until ibis updates", "dimension) column = dataset.data[dimension.name] return tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :] )", "data.mutate(hv_row_id__=data.rowid() - 1) @classmethod def iloc(cls, dataset, index): rows, columns", "predicate is None: predicate = p else: predicate &= p", "i think that is still awaiting # a pr on", "import Iterable from .. 
import util from ..element import Element", "slice): # We should use a pseudo column for the", "not in keys] elif values and keys is None: keys", "= data.distinct() return data if keep_index or not compute else", "ibis.literal(util.numpy_scalar_to_python(v)) if predicate is None: predicate = p else: predicate", "not None: raise NotImplementedError(\"Weighted histograms currently \" \"not implemented for", "i, v in enumerate(sample): p = data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v)) if", "return data if predicates is None else data.filter(predicates) @classmethod def", "data[\"hv_row_id__\"].isin(list(map(int, selection_mask))) ).drop([\"hv_row_id__\"]) elif selection_mask is not None and not", "%s.\" % ibis.__version__ ) if \"hv_row_id__\" in data.columns: return data", "\"RowID\") @classmethod def is_rowid_zero_indexed(cls, data): try: from ibis.client import find_backends,", "data in the appropriate format for the interface, return a", "is not None and not (isinstance(selection_mask, list) and not selection_mask):", "import pandas from .util import cached class IbisInterface(Interface): types =", "if indexed and data.count().execute() == 1 and len(dataset.vdims) == 1:", "cls._index_ibis_table(dataset.data[columns]) if scalar: return ( data.filter(data.hv_row_id__ == rows)[columns] .head(1) .execute()", "| predicate ) if condition is not None: predicates.append(condition) elif", "vdims=values), {} @classmethod def compute(cls, dataset): return dataset.clone(dataset.data.execute()) @classmethod def", "else None)] if key not in keys ] elif keys", "Expr return isinstance(obj, Expr) @classmethod def init(cls, eltype, data, keys,", "columns = [dataset.get_dimension(d).name for d in columns] data = cls._index_ibis_table(dataset.data[columns])", "column for the row number but i think that is", "else data.filter(predicates) @classmethod def aggregate(cls, dataset, dimensions, function, **kwargs): import", "= data.mutate(**{dimension.name: values}) 
return data @classmethod @cached def isscalar(cls, dataset,", "object.stop is not None: bound = util.numpy_scalar_to_python(object.stop) predicates.append(column < bound)", "= dataset.data columns = [d.name for d in dataset.kdims if", "new = data[columns + values] function = { numpy.min: ibis.expr.operations.Min,", "object.start is not None: # Workaround for dask issue #3392", "100 zero_indexed_backend_modules = [ 'ibis.backends.omniscidb.client', ] # the rowid is", "dimensions): return dataset.data.mutate( **{v.name: dataset.data[k] for k, v in dimensions.items()}", "&= p if predicates is None: predicates = predicate else:", "v for k, v in s.to_dict().items()] ), **group_kwargs ), )", "container_type, group_type, **kwargs): # aggregate the necesary dimensions index_dims =", "is still awaiting # a pr on ibis if any(x", "compute=True, keep_index=False, ): dimension = dataset.get_dimension(dimension, strict=True) data = dataset.data[dimension.name]", "dataset.data.mutate(**new_data) @classmethod def add_dimension(cls, dataset, dimension, dim_pos, values, vdim): import", "items = [s[0] if isinstance(s, tuple) else s for s", "[c for c in data.columns if c not in values][:ndim]", "if not isinstance(values, ibis.Expr) and not numpy.isscalar(values): raise ValueError(\"Cannot assign", "if x not in columns } ) else: aggregation =", "\"hv_row_id__\" in data.columns: return data if cls.is_rowid_zero_indexed(data): return data.mutate(hv_row_id__=data.rowid()) else:", "None: values = [c for c in data.columns if c", "is not implemented for IbisInterface.') @classmethod @cached def dframe(cls, dataset,", "columns = index scalar = all(map(util.isscalar, index)) if isinstance(columns, slice):", "= validate_backends(list(find_backends(data))) except Exception: backend = data._find_backend() return type(backend).__module__ in", "selection) indexed = cls.indexed(dataset, selection) data = dataset.data if isinstance(selection_mask,", "**{x: function(new[x]).to_expr() for x in 
new.columns} ) dropped = [x", "columns = [d.name for d in dataset.kdims if d in", "is not None for x in (rows.start, rows.stop, rows.step)): predicates", "[dataset.get_dimension(columns).name] else: columns = [dataset.get_dimension(d).name for d in columns] data", "slice): columns = [x.name for x in dataset.dimensions()[columns]] elif numpy.isscalar(columns):", "data.mutate(hv_row_id__=data.rowid()) else: return data.mutate(hv_row_id__=data.rowid() - 1) @classmethod def iloc(cls, dataset,", "import Iterable except ImportError: from collections import Iterable from ..", "redim(cls, dataset, dimensions): return dataset.data.mutate( **{v.name: dataset.data[k] for k, v", "for k, v in dimensions.items()} ) validate = pandas.PandasInterface.validate reindex", "dataset.data[k] for k, v in dimensions.items()} ) validate = pandas.PandasInterface.validate", "1 ) @classmethod def select(cls, dataset, selection_mask=None, **selection): if selection_mask", "data): import ibis if not cls.has_rowid(): raise ValueError( \"iloc expressions", "interface, return a simple scalar. \"\"\" if len(data.columns) > 1", "in data.columns[ndim : ((ndim + nvdim) if nvdim else None)]", "s in samples] return data[data[dims[0].name].isin(items)] predicates = None for sample", "dataset, data): \"\"\" Given a dataset object and data in", "def shape(cls, dataset): return cls.length(dataset), len(dataset.data.columns) @classmethod @cached def dtype(cls,", "rows.start] if rows.stop: predicates += [data.hv_row_id__ < rows.stop] return data.filter(predicates).drop([\"hv_row_id__\"])", "@classmethod def redim(cls, dataset, dimensions): return dataset.data.mutate( **{v.name: dataset.data[k] for", "see if a row is returned. 
return bool(len(dataset.data[[]].head(1).execute())) @classmethod @cached", "is None: keys = list(data.columns[:ndim]) if values is None: values", "] elif keys == [] and values is None: values", "{ numpy.min: ibis.expr.operations.Min, numpy.nanmin: ibis.expr.operations.Min, numpy.max: ibis.expr.operations.Max, numpy.nanmax: ibis.expr.operations.Max, numpy.mean:", "rows, columns = index scalar = all(map(util.isscalar, index)) if isinstance(columns,", "isinstance(object, tuple): object = slice(*object) alias = dataset.get_dimension(dim).name column =", "is numpy.count_nonzero: aggregation = selection.aggregate( **{ x: ibis.expr.operations.Count(new[x], where=new[x] !=", "dataset.get_dimension(dimension) return dataset.data.head(0).execute().dtypes[dimension.name] dimension_type = dtype @classmethod def sort(cls, dataset,", "against the table to find the unique groups. groups =", "id in object: predicate = column == id condition =", "dataset object and data in the appropriate format for the", "x in values if x not in data.columns] return aggregation,", "= 100 zero_indexed_backend_modules = [ 'ibis.backends.omniscidb.client', ] # the rowid", "dim_pos, values, vdim): import ibis data = dataset.data if dimension.name", "= list(data.columns[:ndim]) if values is None: values = [ key", "pseudo column for the row number but i think that", "d in dataset.kdims if d in dimensions] values = dataset.dimensions(\"value\",", "column = dataset.data[dimension.name] return tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :] ) @classmethod", ") validate = pandas.PandasInterface.validate reindex = pandas.PandasInterface.reindex @classmethod def _index_ibis_table(cls,", "dataset, dimension, dim_pos, values, vdim): import ibis data = dataset.data", "s in samples): items = [s[0] if isinstance(s, tuple) else", "elif numpy.isscalar(columns): columns = [dataset.get_dimension(columns).name] else: columns = [dataset.get_dimension(d).name for", "1) @classmethod def 
iloc(cls, dataset, index): rows, columns = index", "@classmethod def sort(cls, dataset, by=[], reverse=False): return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse)", "= dataset.data.groupby(group_by).aggregate().execute() # filter each group based on the predicate", "data.filter(predicates) @classmethod def aggregate(cls, dataset, dimensions, function, **kwargs): import ibis.expr.operations", "len(dimensions): selection = new.groupby(columns) if function is numpy.count_nonzero: aggregation =", "rows.stop: predicates += [data.hv_row_id__ < rows.stop] return data.filter(predicates).drop([\"hv_row_id__\"]) else: if", "in index_dims] group_kwargs = {} if group_type != \"raw\" and", "dimensions] element_dims = [kdim for kdim in dataset.kdims if kdim", "float(v) for v in bins] binned = expr.bucket(bins).name('bucket') hist =", "rows.step)): predicates = [] if rows.start: predicates += [data.hv_row_id__ >=", "= dataset.data[alias] if isinstance(object, slice): if object.start is not None:", "values = [ key for key in data.columns[ndim : ((ndim", "and issubclass(group_type, Element): group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims) group_kwargs.update(kwargs) group_kwargs[\"dataset\"] =", "k, v in s.to_dict().items()] ), **group_kwargs ), ) for i,", "all(util.isscalar(s) or len(s) == 1 for s in samples): items", "data[dataset.vdims[0].name].execute().iloc[0] return data @classmethod def select_mask(cls, dataset, selection): import ibis", "import ibis if not cls.has_rowid(): raise ValueError( \"iloc expressions are", "eltype, data, keys, values): params = eltype.param.objects() index = params[\"kdims\"]", "is None: selection_mask = cls.select_mask(dataset, selection) indexed = cls.indexed(dataset, selection)", "not None and not (isinstance(selection_mask, list) and not selection_mask): data", "object: predicate = column == id condition = ( predicate", "bool(len(dataset.data[[]].head(1).execute())) @classmethod @cached def 
range(cls, dataset, dimension): dimension = dataset.get_dimension(dimension,", "function, **kwargs): import ibis.expr.operations data = dataset.data columns = [d.name", "density=True, weights=None): bins = numpy.asarray(bins) bins = [int(v) if bins.dtype.kind", "isinstance(object, slice): if object.start is not None: # Workaround for", "[ ( tuple(s.values.tolist()), group_type( dataset.data.filter( [dataset.data[k] == v for k,", "= dataset.dimensions() data = dataset.data if all(util.isscalar(s) or len(s) ==", "# We should use a pseudo column for the row", "obj): if not cls.loaded(): return False from ibis.expr.types import Expr", "if len(data.columns) > 1 or data[[]].count().execute() != 1: return data", "import ibis.expr.operations return hasattr(ibis.expr.operations, \"RowID\") @classmethod def is_rowid_zero_indexed(cls, data): try:", "= data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v)) if predicate is None: predicate =", "and values is None: values = list(data.columns[: nvdim if nvdim", "cls, dataset, dimension, expanded=True, flat=True, compute=True, keep_index=False, ): dimension =", "list(data.columns[: nvdim if nvdim else None]) return data, dict(kdims=keys, vdims=values),", "with item_check(False), sorted_context(False): return container_type(data, kdims=index_dims) else: return container_type(data) @classmethod", "elif selection_mask is not None and not (isinstance(selection_mask, list) and", "is returned. 
return bool(len(dataset.data[[]].head(1).execute())) @classmethod @cached def range(cls, dataset, dimension):", "@classmethod @cached def values( cls, dataset, dimension, expanded=True, flat=True, compute=True,", "= index scalar = all(map(util.isscalar, index)) if isinstance(columns, slice): columns", "# Get the length by counting the length of an", "hist_bins = binned.value_counts().sort_by('bucket').execute() for b, v in zip(hist_bins['bucket'], hist_bins['count']): if", "import ibis predicates = [] for dim, object in selection.items():", "c not in keys] elif values and keys is None:", "] if issubclass(container_type, NdMapping): with item_check(False), sorted_context(False): return container_type(data, kdims=index_dims)", "isinstance(selection_mask, numpy.ndarray): data = cls._index_ibis_table(data) if selection_mask.dtype == numpy.dtype(\"bool\"): selection_mask", "if isinstance(columns.bounds[1], int) else None if keys and values is", "for i, v in enumerate(sample): p = data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v))", "select_mask(cls, dataset, selection): import ibis predicates = [] for dim,", "def compute(cls, dataset): return dataset.clone(dataset.data.execute()) @classmethod def persist(cls, dataset): return", "not compute else data.execute().values @classmethod def histogram(cls, expr, bins, density=True,", "for v in bins] binned = expr.bucket(bins).name('bucket') hist = numpy.zeros(len(bins)-1)", "else: if not isinstance(rows, Iterable): rows = [rows] return data.filter([data.hv_row_id__.isin(rows)]).drop([\"hv_row_id__\"])", "else: return data.mutate(hv_row_id__=data.rowid() - 1) @classmethod def iloc(cls, dataset, index):", "in new.columns if x not in columns } ) else:", "if selection_mask.dtype == numpy.dtype(\"bool\"): selection_mask = numpy.where(selection_mask)[0] data = data.filter(", "dataset, samples=[]): import ibis dims = dataset.dimensions() data = dataset.data", "values is None: values = [ key for key in", "@classmethod 
def loaded(cls): return \"ibis\" in sys.modules @classmethod def applies(cls,", "x: function(new[x]).to_expr() for x in new.columns if x not in", "if x not in data.columns] return aggregation, dropped @classmethod @cached", "keys, values): params = eltype.param.objects() index = params[\"kdims\"] columns =", "return None, None if dimension.nodata is not None: return Interface.range(dataset,", "selection_mask=None, **selection): if selection_mask is None: selection_mask = cls.select_mask(dataset, selection)", "@classmethod def persist(cls, dataset): return cls.compute(dataset) @classmethod @cached def length(self,", "until ibis updates versions @classmethod def has_rowid(cls): import ibis.expr.operations return", "weights is not None: raise NotImplementedError(\"Weighted histograms currently \" \"not", "predicate &= p if predicates is None: predicates = predicate", "return hist, bins @classmethod @cached def shape(cls, dataset): return cls.length(dataset),", "x in new.columns} ) dropped = [x for x in", "function is numpy.count_nonzero: aggregation = selection.aggregate( **{ x: ibis.expr.operations.Count(new[x], where=new[x]", "return cls.compute(dataset) @classmethod @cached def length(self, dataset): # Get the", "@classmethod def aggregate(cls, dataset, dimensions, function, **kwargs): import ibis.expr.operations data", "data, dict(kdims=keys, vdims=values), {} @classmethod def compute(cls, dataset): return dataset.clone(dataset.data.execute())", "dataset.data[dimension.name] if not expanded: data = data.distinct() return data if", "values( cls, dataset, dimension, expanded=True, flat=True, compute=True, keep_index=False, ): dimension", "return type(backend).__module__ in cls.zero_indexed_backend_modules @classmethod def loaded(cls): return \"ibis\" in", "isinstance(s, tuple) else s for s in samples] return data[data[dims[0].name].isin(items)]", "for ibis version %s.\" % ibis.__version__ ) if \"hv_row_id__\" in", "pandas.PandasInterface.reindex @classmethod def 
_index_ibis_table(cls, data): import ibis if not cls.has_rowid():", "= dataset.data if isinstance(selection_mask, numpy.ndarray): data = cls._index_ibis_table(data) if selection_mask.dtype", "[] for dim, object in selection.items(): if isinstance(object, tuple): object", "not expanded: data = data.distinct() return data if keep_index or", "new.columns} ) dropped = [x for x in values if", "the length by counting the length of an empty query.", "group_kwargs.update(kwargs) group_kwargs[\"dataset\"] = dataset.dataset group_by = [d.name for d in", "= [int(v) if bins.dtype.kind in 'iu' else float(v) for v", "[ 'ibis.backends.omniscidb.client', ] # the rowid is needed until ibis", "ValueError( \"iloc expressions are not supported for ibis version %s.\"", "dict(util.get_param_values(dataset), kdims=element_dims) group_kwargs.update(kwargs) group_kwargs[\"dataset\"] = dataset.dataset group_by = [d.name for", "values = dataset.dimensions(\"value\", label=\"name\") new = data[columns + values] function", "nvdim else None)] if key not in keys ] elif", "else float(v) for v in bins] binned = expr.bucket(bins).name('bucket') hist", "and data.count().execute() == 1 and len(dataset.vdims) == 1: return data[dataset.vdims[0].name].execute().iloc[0]", "numpy.where(selection_mask)[0] data = data.filter( data[\"hv_row_id__\"].isin(list(map(int, selection_mask))) ).drop([\"hv_row_id__\"]) elif selection_mask is", "Interface from . 
import pandas from .util import cached class", "[dataset.data[k] == v for k, v in s.to_dict().items()] ), **group_kwargs", "indexed and data.count().execute() == 1 and len(dataset.vdims) == 1: return", "util.numpy_scalar_to_python(object.start) predicates.append(bound <= column) if object.stop is not None: bound", "from .util import cached class IbisInterface(Interface): types = () datatype", "not sample: continue predicate = None for i, v in", "object) return predicates @classmethod def sample(cls, dataset, samples=[]): import ibis", "NdMapping, item_check, sorted_context from .interface import Interface from . import", "data.execute().iat[0, 0] @classmethod def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):", "def mask(cls, dataset, mask, mask_value=numpy.nan): raise NotImplementedError('Mask is not implemented", "None: bound = util.numpy_scalar_to_python(object.stop) predicates.append(column < bound) elif isinstance(object, (set,", "data = data.mutate(**{dimension.name: values}) return data @classmethod @cached def isscalar(cls,", "[s[0] if isinstance(s, tuple) else s for s in samples]", "predicate else: predicates |= predicate return data if predicates is", "in dataset.dimensions()[columns]] elif numpy.isscalar(columns): columns = [dataset.get_dimension(columns).name] else: columns =", "isscalar(cls, dataset, dim): return ( dataset.data[dataset.get_dimension(dim, strict=True).name] .distinct() .count() .compute()", "except Exception: backend = data._find_backend() return type(backend).__module__ in cls.zero_indexed_backend_modules @classmethod", "int): ndim = min([index.bounds[1], len(index.default)]) else: ndim = None nvdim", "not None: bound = util.numpy_scalar_to_python(object.stop) predicates.append(column < bound) elif isinstance(object,", ") if condition is not None: predicates.append(condition) elif callable(object): predicates.append(object(column))", "from .. 
import util from ..element import Element from ..ndmapping", "return dataset.clone(dataset.data.execute()) @classmethod def persist(cls, dataset): return cls.compute(dataset) @classmethod @cached", "1 for s in samples): items = [s[0] if isinstance(s,", "sorted_context(False): return container_type(data, kdims=index_dims) else: return container_type(data) @classmethod def assign(cls,", "data[[]].count().execute() != 1: return data return data.execute().iat[0, 0] @classmethod def", "= None nvdim = columns.bounds[1] if isinstance(columns.bounds[1], int) else None", "import NdMapping, item_check, sorted_context from .interface import Interface from .", "= predicate else: predicates |= predicate return data if predicates", "predicates = [] if rows.start: predicates += [data.hv_row_id__ >= rows.start]", "= [] for dim, object in selection.items(): if isinstance(object, tuple):", "isinstance(rows, slice): # We should use a pseudo column for", "data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v)) if predicate is None: predicate = p", "@classmethod def _index_ibis_table(cls, data): import ibis if not cls.has_rowid(): raise", "table column, \" \"expecting either ibis.Expr or scalar.\" % type(values).__name__)", "to see if a row is returned. return bool(len(dataset.data[[]].head(1).execute())) @classmethod", "raise ValueError(\"Cannot assign %s type as a Ibis table column,", "raise NotImplementedError('Mask is not implemented for IbisInterface.') @classmethod @cached def", "rowid is needed until ibis updates versions @classmethod def has_rowid(cls):", "None: predicates = predicate else: predicates |= predicate return data", "len(s) == 1 for s in samples): items = [s[0]", "condition is None else condition | predicate ) if condition", "sort(cls, dataset, by=[], reverse=False): return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for x", "unique groups. 
groups = dataset.data.groupby(group_by).aggregate().execute() # filter each group based", "return container_type(data) @classmethod def assign(cls, dataset, new_data): return dataset.data.mutate(**new_data) @classmethod", "None: # Workaround for dask issue #3392 bound = util.numpy_scalar_to_python(object.start)", "row number but i think that is still awaiting #", "< bound) elif isinstance(object, (set, list)): # rowid conditions condition", "c in data.columns if c not in keys] elif values", "\"\"\" Given a dataset object and data in the appropriate", "if predicate is None: predicate = p else: predicate &=", "not in values][:ndim] elif keys is None: keys = list(data.columns[:ndim])", "group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims) group_kwargs.update(kwargs) group_kwargs[\"dataset\"] = dataset.dataset group_by =", "if scalar: return ( data.filter(data.hv_row_id__ == rows)[columns] .head(1) .execute() .iloc[0,", "reindex = pandas.PandasInterface.reindex @classmethod def _index_ibis_table(cls, data): import ibis if", "sys.modules @classmethod def applies(cls, obj): if not cls.loaded(): return False", "isinstance(object, ibis.Expr): predicates.append(object) else: predicates.append(column == object) return predicates @classmethod", "index scalar = all(map(util.isscalar, index)) if isinstance(columns, slice): columns =", "groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): # aggregate the necesary", "samples: if util.isscalar(sample): sample = [sample] if not sample: continue", "selection_mask is None: selection_mask = cls.select_mask(dataset, selection) indexed = cls.indexed(dataset,", "try: from collections.abc import Iterable except ImportError: from collections import", "try: from ibis.client import find_backends, validate_backends (backend,) = validate_backends(list(find_backends(data))) except", "data if keep_index or not compute else data.execute().values @classmethod def", "ibis predicates = [] for dim, object in 
selection.items(): if", "ibis.expr.operations.StandardDev, numpy.nanstd: ibis.expr.operations.StandardDev, numpy.sum: ibis.expr.operations.Sum, numpy.nansum: ibis.expr.operations.Sum, numpy.var: ibis.expr.operations.Variance, numpy.nanvar:", "if object.start is not None: # Workaround for dask issue", "should use a pseudo column for the row number but", "container_type(data, kdims=index_dims) else: return container_type(data) @classmethod def assign(cls, dataset, new_data):", "@classmethod @cached def shape(cls, dataset): return cls.length(dataset), len(dataset.data.columns) @classmethod @cached", "[ key for key in data.columns[ndim : ((ndim + nvdim)", "= dataset.dimensions(\"value\", label=\"name\") new = data[columns + values] function =", "% type(values).__name__) data = data.mutate(**{dimension.name: values}) return data @classmethod @cached", "predicate = column == id condition = ( predicate if", "[d.name for d in dataset.kdims if d in dimensions] values", "isinstance(obj, Expr) @classmethod def init(cls, eltype, data, keys, values): params", "}.get(function, function) if len(dimensions): selection = new.groupby(columns) if function is", ">= rows.start] if rows.stop: predicates += [data.hv_row_id__ < rows.stop] return", "ibis.expr.operations.Min, numpy.nanmin: ibis.expr.operations.Min, numpy.max: ibis.expr.operations.Max, numpy.nanmax: ibis.expr.operations.Max, numpy.mean: ibis.expr.operations.Mean, numpy.nanmean:", "= util.numpy_scalar_to_python(object.stop) predicates.append(column < bound) elif isinstance(object, (set, list)): #", "selection) data = dataset.data if isinstance(selection_mask, numpy.ndarray): data = cls._index_ibis_table(data)", "dimension.nodata is not None: return Interface.range(dataset, dimension) column = dataset.data[dimension.name]", "hist_bins['count']): if numpy.isnan(b): continue hist[int(b)] = v if weights is", "in columns] data = cls._index_ibis_table(dataset.data[columns]) if scalar: return ( data.filter(data.hv_row_id__", 
"@cached def nonzero(cls, dataset): # Make an empty query to" ]
[ "init_round(self, num_round): \"\"\" Launches the round number \"num_round\". :param num_round:", "of the models. \"\"\" last_tournament_id = \"0\" * TOURNAMENT_ID_WIDTH def", "num_round, winner): \"\"\" Registers the results of the round. :param", "the tournament instance. \"\"\" string_attributes = ['tournament_id', 'name', 'location', 'timer_type',", "for player in self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date'] = str(self.start_date) serialized_tournament['end_date'] =", "number. :param winner: the list of the winners. :return: None.", "num_round: the round number. :param winner: the list of the", "list of identifier of the players who join the tournament.", "= name self.location = location self.start_date = None self.end_date =", "the tournament. Adds the tournament_id to the players list of", "8 NB_ROUND = 4 NB_PLAYERS = 8 NB_MATCH = 4", "attribute in string_attributes: serialized_tournament[attribute] = getattr(self, attribute) serialized_tournament['rounds'] = []", "serialized_tournament['end_date'] = str(self.end_date) return serialized_tournament def end_tournament(self): \"\"\" Handles the", "finished and the end date of the tournament. \"\"\" for", "the tournament. \"\"\" for player in self.list_of_players: player.actor.list_of_tournaments_played.append(self.tournament_id) self.finished =", "def register_round_results(self, num_round, winner): \"\"\" Registers the results of the", "tournament. Adds the tournament_id to the players list of tournaments.", "Defines the list of identifier of the players who join", "in self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date'] = str(self.start_date) serialized_tournament['end_date'] = str(self.end_date) return", "the central piece of the models. 
\"\"\" last_tournament_id = \"0\"", "= None self.timer_type = timer_type self.description = description self.number_of_rounds =", "end date of the tournament. \"\"\" for player in self.list_of_players:", "self.name = name self.location = location self.start_date = None self.end_date", "\"\"\" Handles the tournament logic \"\"\" import datetime from chess.utils.utils", "get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH) self.tournament_id = Tournament.last_tournament_id self.name = name self.location =", "= 8 NB_ROUND = 4 NB_PLAYERS = 8 NB_MATCH =", "__init__(self, name, location, timer_type, description): Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH) self.tournament_id", "= location self.start_date = None self.end_date = None self.timer_type =", "str(self.start_date) serialized_tournament['end_date'] = str(self.end_date) return serialized_tournament def end_tournament(self): \"\"\" Handles", "self.rounds[num_round].assign_points() self.rounds[num_round].finished = True self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players() self.rounds[num_round].end_date = datetime.date.today() def", "the attribute finished and the end date of the tournament.", "= get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH) self.tournament_id = Tournament.last_tournament_id self.name = name self.location", "self.finished = False def define_players(self, actors): \"\"\" Defines the list", "TOURNAMENT_ID_WIDTH = 8 NB_ROUND = 4 NB_PLAYERS = 8 NB_MATCH", "register_round_results(self, num_round, winner): \"\"\" Registers the results of the round.", "self.timer_type = timer_type self.description = description self.number_of_rounds = NB_ROUND self.rounds", "= [] self.list_of_players = [] self.players_assigned = False self.finished =", "NB_PLAYERS = 8 NB_MATCH = 4 class Tournament: \"\"\" The", "\"\"\" self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points() 
self.rounds[num_round].finished = True self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players() self.rounds[num_round].end_date =", "False def define_players(self, actors): \"\"\" Defines the list of identifier", ":return: None \"\"\" tour = Round(num_round, self.tournament_id, self.list_of_players) tour.start_date =", "in self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players'] = [] for player in self.list_of_players:", "self.list_of_players) tour.start_date = datetime.date.today() tour.rank_players() tour.define_matches() self.rounds.append(tour) def register_round_results(self, num_round,", "self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date'] = str(self.start_date) serialized_tournament['end_date'] = str(self.end_date) return serialized_tournament", "serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date'] = str(self.start_date) serialized_tournament['end_date'] = str(self.end_date) return serialized_tournament def", "\"\"\" for player in self.list_of_players: player.actor.list_of_tournaments_played.append(self.tournament_id) self.finished = True self.end_date", "self.start_date = None self.end_date = None self.timer_type = timer_type self.description", "tour.define_matches() self.rounds.append(tour) def register_round_results(self, num_round, winner): \"\"\" Registers the results", "define_players(self, actors): \"\"\" Defines the list of identifier of the", "Defines the attribute finished and the end date of the", "The class Tournament is the central piece of the models.", "TOURNAMENT_ID_WIDTH) self.tournament_id = Tournament.last_tournament_id self.name = name self.location = location", "= False def define_players(self, actors): \"\"\" Defines the list of", "dictionary of the tournament instance. 
\"\"\" string_attributes = ['tournament_id', 'name',", "import Round TOURNAMENT_ID_WIDTH = 8 NB_ROUND = 4 NB_PLAYERS =", "Round TOURNAMENT_ID_WIDTH = 8 NB_ROUND = 4 NB_PLAYERS = 8", ":return: None. \"\"\" self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points() self.rounds[num_round].finished = True self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players()", "def tournament_to_dict(self): \"\"\" Converts the tournament into a dictionary :return:", "central piece of the models. \"\"\" last_tournament_id = \"0\" *", "import get_new_id from chess.models.actors import Player from chess.models.round import Round", "self.tournament_id, self.list_of_players) tour.start_date = datetime.date.today() tour.rank_players() tour.define_matches() self.rounds.append(tour) def register_round_results(self,", "actors: :return: None \"\"\" for num_player in range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player], self.tournament_id,", "models. \"\"\" last_tournament_id = \"0\" * TOURNAMENT_ID_WIDTH def __init__(self, name,", "\"\"\" Handles the end of the tournament. Adds the tournament_id", "4 NB_PLAYERS = 8 NB_MATCH = 4 class Tournament: \"\"\"", "logic \"\"\" import datetime from chess.utils.utils import get_new_id from chess.models.actors", "datetime.date.today() def tournament_to_dict(self): \"\"\" Converts the tournament into a dictionary", "\"\"\" Converts the tournament into a dictionary :return: dictionary of", "from chess.utils.utils import get_new_id from chess.models.actors import Player from chess.models.round", "last_tournament_id = \"0\" * TOURNAMENT_ID_WIDTH def __init__(self, name, location, timer_type,", "tournament into a dictionary :return: dictionary of the tournament instance.", "the players who join the tournament. 
:param actors: :return: None", "= datetime.date.today() tour.rank_players() tour.define_matches() self.rounds.append(tour) def register_round_results(self, num_round, winner): \"\"\"", "Converts the tournament into a dictionary :return: dictionary of the", "serialized_tournament = {} for attribute in string_attributes: serialized_tournament[attribute] = getattr(self,", "of the round played :return: None \"\"\" tour = Round(num_round,", "num_round): \"\"\" Launches the round number \"num_round\". :param num_round: number", "True self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players() self.rounds[num_round].end_date = datetime.date.today() def tournament_to_dict(self): \"\"\" Converts", "of tournaments. Defines the attribute finished and the end date", "self.players_assigned = False self.finished = False def define_players(self, actors): \"\"\"", "is the central piece of the models. \"\"\" last_tournament_id =", "the tournament. :param actors: :return: None \"\"\" for num_player in", ":param winner: the list of the winners. :return: None. \"\"\"", "= description self.number_of_rounds = NB_ROUND self.rounds = [] self.list_of_players =", "the models. \"\"\" last_tournament_id = \"0\" * TOURNAMENT_ID_WIDTH def __init__(self,", "round. :param num_round: the round number. :param winner: the list", "tournament instance. \"\"\" string_attributes = ['tournament_id', 'name', 'location', 'timer_type', 'description',", "= [] for r0und in self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players'] = []", "of the round. :param num_round: the round number. 
:param winner:", "4 class Tournament: \"\"\" The class Tournament is the central", "self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points() self.rounds[num_round].finished = True self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players() self.rounds[num_round].end_date = datetime.date.today()", "\"\"\" Launches the round number \"num_round\". :param num_round: number of", "-*- \"\"\" Handles the tournament logic \"\"\" import datetime from", "'description', 'number_of_rounds', 'players_assigned'] serialized_tournament = {} for attribute in string_attributes:", "{} for attribute in string_attributes: serialized_tournament[attribute] = getattr(self, attribute) serialized_tournament['rounds']", "self.description = description self.number_of_rounds = NB_ROUND self.rounds = [] self.list_of_players", "\"\"\" for num_player in range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player], self.tournament_id, num_player)) def init_round(self,", "self.end_date = None self.timer_type = timer_type self.description = description self.number_of_rounds", "self.rounds[num_round].rank_players() self.rounds[num_round].end_date = datetime.date.today() def tournament_to_dict(self): \"\"\" Converts the tournament", "identifier of the players who join the tournament. :param actors:", "from chess.models.round import Round TOURNAMENT_ID_WIDTH = 8 NB_ROUND = 4", "def __init__(self, name, location, timer_type, description): Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH)", "None. \"\"\" self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points() self.rounds[num_round].finished = True self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players() self.rounds[num_round].end_date", "chess.models.round import Round TOURNAMENT_ID_WIDTH = 8 NB_ROUND = 4 NB_PLAYERS", "to the players list of tournaments. 
Defines the attribute finished", "num_player)) def init_round(self, num_round): \"\"\" Launches the round number \"num_round\".", "tour.start_date = datetime.date.today() tour.rank_players() tour.define_matches() self.rounds.append(tour) def register_round_results(self, num_round, winner):", "the end date of the tournament. \"\"\" for player in", "tournament_to_dict(self): \"\"\" Converts the tournament into a dictionary :return: dictionary", "tournament. \"\"\" for player in self.list_of_players: player.actor.list_of_tournaments_played.append(self.tournament_id) self.finished = True", "the tournament into a dictionary :return: dictionary of the tournament", "Launches the round number \"num_round\". :param num_round: number of the", "= Round(num_round, self.tournament_id, self.list_of_players) tour.start_date = datetime.date.today() tour.rank_players() tour.define_matches() self.rounds.append(tour)", "winner: the list of the winners. :return: None. \"\"\" self.rounds[num_round].register_results(winner)", "Tournament: \"\"\" The class Tournament is the central piece of", "Round(num_round, self.tournament_id, self.list_of_players) tour.start_date = datetime.date.today() tour.rank_players() tour.define_matches() self.rounds.append(tour) def", "self.location = location self.start_date = None self.end_date = None self.timer_type", "= 4 NB_PLAYERS = 8 NB_MATCH = 4 class Tournament:", "timer_type self.description = description self.number_of_rounds = NB_ROUND self.rounds = []", "in range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player], self.tournament_id, num_player)) def init_round(self, num_round): \"\"\" Launches", "= [] self.players_assigned = False self.finished = False def define_players(self,", "the results of the round. :param num_round: the round number.", "instance. 
\"\"\" string_attributes = ['tournament_id', 'name', 'location', 'timer_type', 'description', 'number_of_rounds',", "= 4 class Tournament: \"\"\" The class Tournament is the", "of identifier of the players who join the tournament. :param", "serialized_tournament def end_tournament(self): \"\"\" Handles the end of the tournament.", "Adds the tournament_id to the players list of tournaments. Defines", "the tournament_id to the players list of tournaments. Defines the", "\"\"\" Registers the results of the round. :param num_round: the", "\"num_round\". :param num_round: number of the round played :return: None", "'players_assigned'] serialized_tournament = {} for attribute in string_attributes: serialized_tournament[attribute] =", "-*- coding: utf-8 -*- \"\"\" Handles the tournament logic \"\"\"", "[] for player in self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date'] = str(self.start_date) serialized_tournament['end_date']", "serialized_tournament['start_date'] = str(self.start_date) serialized_tournament['end_date'] = str(self.end_date) return serialized_tournament def end_tournament(self):", "of the tournament. Adds the tournament_id to the players list", "self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players'] = [] for player in self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict())", "self.tournament_id = Tournament.last_tournament_id self.name = name self.location = location self.start_date", "NB_ROUND = 4 NB_PLAYERS = 8 NB_MATCH = 4 class", "= False self.finished = False def define_players(self, actors): \"\"\" Defines", "the list of identifier of the players who join the", "date of the tournament. \"\"\" for player in self.list_of_players: player.actor.list_of_tournaments_played.append(self.tournament_id)", ":param num_round: the round number. 
:param winner: the list of", "attribute) serialized_tournament['rounds'] = [] for r0und in self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players']", "utf-8 -*- \"\"\" Handles the tournament logic \"\"\" import datetime", "number of the round played :return: None \"\"\" tour =", "'number_of_rounds', 'players_assigned'] serialized_tournament = {} for attribute in string_attributes: serialized_tournament[attribute]", "end_tournament(self): \"\"\" Handles the end of the tournament. Adds the", "Tournament is the central piece of the models. \"\"\" last_tournament_id", "[] self.list_of_players = [] self.players_assigned = False self.finished = False", "class Tournament is the central piece of the models. \"\"\"", "self.list_of_players = [] self.players_assigned = False self.finished = False def", "self.number_of_rounds = NB_ROUND self.rounds = [] self.list_of_players = [] self.players_assigned", "\"0\" * TOURNAMENT_ID_WIDTH def __init__(self, name, location, timer_type, description): Tournament.last_tournament_id", "\"\"\" tour = Round(num_round, self.tournament_id, self.list_of_players) tour.start_date = datetime.date.today() tour.rank_players()", "NB_MATCH = 4 class Tournament: \"\"\" The class Tournament is", "the round number. 
:param winner: the list of the winners.", "self.list_of_players.append(Player(actors[num_player], self.tournament_id, num_player)) def init_round(self, num_round): \"\"\" Launches the round", "tour = Round(num_round, self.tournament_id, self.list_of_players) tour.start_date = datetime.date.today() tour.rank_players() tour.define_matches()", "serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players'] = [] for player in self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date']", ":return: None \"\"\" for num_player in range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player], self.tournament_id, num_player))", "string_attributes = ['tournament_id', 'name', 'location', 'timer_type', 'description', 'number_of_rounds', 'players_assigned'] serialized_tournament", "# -*- coding: utf-8 -*- \"\"\" Handles the tournament logic", "['tournament_id', 'name', 'location', 'timer_type', 'description', 'number_of_rounds', 'players_assigned'] serialized_tournament = {}", "return serialized_tournament def end_tournament(self): \"\"\" Handles the end of the", "coding: utf-8 -*- \"\"\" Handles the tournament logic \"\"\" import", "Tournament.last_tournament_id self.name = name self.location = location self.start_date = None", "Handles the tournament logic \"\"\" import datetime from chess.utils.utils import", "= getattr(self, attribute) serialized_tournament['rounds'] = [] for r0und in self.rounds:", "location, timer_type, description): Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH) self.tournament_id = Tournament.last_tournament_id", "serialized_tournament['list_of_players'] = [] for player in self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date'] =", "tournament logic \"\"\" import datetime from chess.utils.utils import get_new_id 
from", "Player from chess.models.round import Round TOURNAMENT_ID_WIDTH = 8 NB_ROUND =", "def define_players(self, actors): \"\"\" Defines the list of identifier of", "Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH) self.tournament_id = Tournament.last_tournament_id self.name = name", "name self.location = location self.start_date = None self.end_date = None", "def end_tournament(self): \"\"\" Handles the end of the tournament. Adds", "piece of the models. \"\"\" last_tournament_id = \"0\" * TOURNAMENT_ID_WIDTH", "None self.timer_type = timer_type self.description = description self.number_of_rounds = NB_ROUND", "\"\"\" The class Tournament is the central piece of the", "= ['tournament_id', 'name', 'location', 'timer_type', 'description', 'number_of_rounds', 'players_assigned'] serialized_tournament =", ":param num_round: number of the round played :return: None \"\"\"", "player in self.list_of_players: player.actor.list_of_tournaments_played.append(self.tournament_id) self.finished = True self.end_date = datetime.date.today()", "datetime.date.today() tour.rank_players() tour.define_matches() self.rounds.append(tour) def register_round_results(self, num_round, winner): \"\"\" Registers", "description self.number_of_rounds = NB_ROUND self.rounds = [] self.list_of_players = []", "list of tournaments. Defines the attribute finished and the end", "the winners. :return: None. \"\"\" self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points() self.rounds[num_round].finished = True", "getattr(self, attribute) serialized_tournament['rounds'] = [] for r0und in self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict())", "and the end date of the tournament. 
\"\"\" for player", "tour.rank_players() tour.define_matches() self.rounds.append(tour) def register_round_results(self, num_round, winner): \"\"\" Registers the", "self.rounds = [] self.list_of_players = [] self.players_assigned = False self.finished", "who join the tournament. :param actors: :return: None \"\"\" for", "num_round: number of the round played :return: None \"\"\" tour", "= datetime.date.today() def tournament_to_dict(self): \"\"\" Converts the tournament into a", "self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players() self.rounds[num_round].end_date = datetime.date.today() def tournament_to_dict(self): \"\"\" Converts the", "of the tournament instance. \"\"\" string_attributes = ['tournament_id', 'name', 'location',", "self.rounds.append(tour) def register_round_results(self, num_round, winner): \"\"\" Registers the results of", "the round. :param num_round: the round number. :param winner: the", "of the tournament. \"\"\" for player in self.list_of_players: player.actor.list_of_tournaments_played.append(self.tournament_id) self.finished", "description): Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH) self.tournament_id = Tournament.last_tournament_id self.name =", "round number. :param winner: the list of the winners. :return:", "\"\"\" last_tournament_id = \"0\" * TOURNAMENT_ID_WIDTH def __init__(self, name, location,", "= True self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players() self.rounds[num_round].end_date = datetime.date.today() def tournament_to_dict(self): \"\"\"", "round played :return: None \"\"\" tour = Round(num_round, self.tournament_id, self.list_of_players)", "end of the tournament. 
Adds the tournament_id to the players", "player in self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date'] = str(self.start_date) serialized_tournament['end_date'] = str(self.end_date)", "the players list of tournaments. Defines the attribute finished and", "tournament. :param actors: :return: None \"\"\" for num_player in range(NB_PLAYERS):", "= Tournament.last_tournament_id self.name = name self.location = location self.start_date =", "round number \"num_round\". :param num_round: number of the round played", ":param actors: :return: None \"\"\" for num_player in range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player],", "the round played :return: None \"\"\" tour = Round(num_round, self.tournament_id,", "of the players who join the tournament. :param actors: :return:", "= \"0\" * TOURNAMENT_ID_WIDTH def __init__(self, name, location, timer_type, description):", "the end of the tournament. Adds the tournament_id to the", "dictionary :return: dictionary of the tournament instance. \"\"\" string_attributes =", "TOURNAMENT_ID_WIDTH def __init__(self, name, location, timer_type, description): Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id,", "the tournament logic \"\"\" import datetime from chess.utils.utils import get_new_id", "* TOURNAMENT_ID_WIDTH def __init__(self, name, location, timer_type, description): Tournament.last_tournament_id =", "\"\"\" import datetime from chess.utils.utils import get_new_id from chess.models.actors import", "= 8 NB_MATCH = 4 class Tournament: \"\"\" The class", "class Tournament: \"\"\" The class Tournament is the central piece", "attribute finished and the end date of the tournament. \"\"\"", "range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player], self.tournament_id, num_player)) def init_round(self, num_round): \"\"\" Launches the", "a dictionary :return: dictionary of the tournament instance. 
\"\"\" string_attributes", "None \"\"\" tour = Round(num_round, self.tournament_id, self.list_of_players) tour.start_date = datetime.date.today()", "\"\"\" string_attributes = ['tournament_id', 'name', 'location', 'timer_type', 'description', 'number_of_rounds', 'players_assigned']", "= {} for attribute in string_attributes: serialized_tournament[attribute] = getattr(self, attribute)", "tournament_id to the players list of tournaments. Defines the attribute", "timer_type, description): Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH) self.tournament_id = Tournament.last_tournament_id self.name", "join the tournament. :param actors: :return: None \"\"\" for num_player", "the list of the winners. :return: None. \"\"\" self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points()", "[] for r0und in self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players'] = [] for", "r0und in self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players'] = [] for player in", "8 NB_MATCH = 4 class Tournament: \"\"\" The class Tournament", "datetime from chess.utils.utils import get_new_id from chess.models.actors import Player from", "def init_round(self, num_round): \"\"\" Launches the round number \"num_round\". :param", "'name', 'location', 'timer_type', 'description', 'number_of_rounds', 'players_assigned'] serialized_tournament = {} for", "number \"num_round\". :param num_round: number of the round played :return:", "location self.start_date = None self.end_date = None self.timer_type = timer_type", "list of the winners. :return: None. \"\"\" self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points() self.rounds[num_round].finished", "of the winners. :return: None. 
\"\"\" self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points() self.rounds[num_round].finished =", "None \"\"\" for num_player in range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player], self.tournament_id, num_player)) def", "actors): \"\"\" Defines the list of identifier of the players", "winner): \"\"\" Registers the results of the round. :param num_round:", "the round number \"num_round\". :param num_round: number of the round", "played :return: None \"\"\" tour = Round(num_round, self.tournament_id, self.list_of_players) tour.start_date", "num_player in range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player], self.tournament_id, num_player)) def init_round(self, num_round): \"\"\"", "serialized_tournament['rounds'] = [] for r0und in self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players'] =", "from chess.models.actors import Player from chess.models.round import Round TOURNAMENT_ID_WIDTH =", "= [] for player in self.list_of_players: serialized_tournament['list_of_players'].append(player.player_to_dict()) serialized_tournament['start_date'] = str(self.start_date)", "\"\"\" Defines the list of identifier of the players who", "string_attributes: serialized_tournament[attribute] = getattr(self, attribute) serialized_tournament['rounds'] = [] for r0und", "import Player from chess.models.round import Round TOURNAMENT_ID_WIDTH = 8 NB_ROUND", "'timer_type', 'description', 'number_of_rounds', 'players_assigned'] serialized_tournament = {} for attribute in", "Registers the results of the round. 
:param num_round: the round", "= str(self.end_date) return serialized_tournament def end_tournament(self): \"\"\" Handles the end", "in string_attributes: serialized_tournament[attribute] = getattr(self, attribute) serialized_tournament['rounds'] = [] for", "get_new_id from chess.models.actors import Player from chess.models.round import Round TOURNAMENT_ID_WIDTH", "= timer_type self.description = description self.number_of_rounds = NB_ROUND self.rounds =", "= None self.end_date = None self.timer_type = timer_type self.description =", "into a dictionary :return: dictionary of the tournament instance. \"\"\"", "name, location, timer_type, description): Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH) self.tournament_id =", "self.tournament_id, num_player)) def init_round(self, num_round): \"\"\" Launches the round number", "tournaments. Defines the attribute finished and the end date of", "False self.finished = False def define_players(self, actors): \"\"\" Defines the", "[] self.players_assigned = False self.finished = False def define_players(self, actors):", "import datetime from chess.utils.utils import get_new_id from chess.models.actors import Player", "= NB_ROUND self.rounds = [] self.list_of_players = [] self.players_assigned =", "self.rounds[num_round].finished = True self.rounds[num_round].memorize_opponents() self.rounds[num_round].rank_players() self.rounds[num_round].end_date = datetime.date.today() def tournament_to_dict(self):", "NB_ROUND self.rounds = [] self.list_of_players = [] self.players_assigned = False", "'location', 'timer_type', 'description', 'number_of_rounds', 'players_assigned'] serialized_tournament = {} for attribute", "Handles the end of the tournament. Adds the tournament_id to", "results of the round. :param num_round: the round number. 
:param", "for r0und in self.rounds: serialized_tournament['rounds'].append(r0und.round_to_dict()) serialized_tournament['list_of_players'] = [] for player", "serialized_tournament[attribute] = getattr(self, attribute) serialized_tournament['rounds'] = [] for r0und in", "None self.end_date = None self.timer_type = timer_type self.description = description", "chess.utils.utils import get_new_id from chess.models.actors import Player from chess.models.round import", "chess.models.actors import Player from chess.models.round import Round TOURNAMENT_ID_WIDTH = 8", "for num_player in range(NB_PLAYERS): self.list_of_players.append(Player(actors[num_player], self.tournament_id, num_player)) def init_round(self, num_round):", "winners. :return: None. \"\"\" self.rounds[num_round].register_results(winner) self.rounds[num_round].assign_points() self.rounds[num_round].finished = True self.rounds[num_round].memorize_opponents()", ":return: dictionary of the tournament instance. \"\"\" string_attributes = ['tournament_id',", "for attribute in string_attributes: serialized_tournament[attribute] = getattr(self, attribute) serialized_tournament['rounds'] =", "players list of tournaments. Defines the attribute finished and the", "players who join the tournament. :param actors: :return: None \"\"\"", "str(self.end_date) return serialized_tournament def end_tournament(self): \"\"\" Handles the end of", "for player in self.list_of_players: player.actor.list_of_tournaments_played.append(self.tournament_id) self.finished = True self.end_date =", "= str(self.start_date) serialized_tournament['end_date'] = str(self.end_date) return serialized_tournament def end_tournament(self): \"\"\"", "self.rounds[num_round].end_date = datetime.date.today() def tournament_to_dict(self): \"\"\" Converts the tournament into" ]
[ "raw_input(\"Please enter the maximum number of samples to collect: \")", "Kind: \") servername = raw_input(\"Enter enter Server IP/FQDN: \") serveruid", "range(int(keys_to_monitor)): keys.append(raw_input(\"Enter the key: \")) data = {} if int(maxsamples)", "data[\"server\"] = serverdetails data[\"keys\"] = keys return data # Getting", "enter Adapter Kind: \") resourceKind = raw_input(\"Please enter Resource Kind:", "small python program to setup the configuration environment for data-collect.py", "the python program to gather Metrics from vROps # Author", "keys return data # Getting the path where config.json file", "in range(int(keys_to_monitor)): keys.append(raw_input(\"Enter the key: \")) data = {} if", "= int(maxsamples) serverdetails = {} serverdetails[\"name\"] = servername serverdetails[\"userid\"] =", "Kind: \") resourceKind = raw_input(\"Please enter Resource Kind: \") servername", "data[\"adapterKind\"] = adapterkind data[\"resourceKind\"] = resourceKind data[\"sampleno\"] = int(maxsamples) serverdetails", "fullpath = path+\"/\"+\"config.json\" # Getting the data for the config.json", "get_script_path(): return os.path.dirname(os.path.realpath(sys.argv[0])) def get_the_inputs(): adapterkind = raw_input(\"Please enter Adapter", "path+\"/\"+\"config.json\" # Getting the data for the config.json file final_data", "base64 import os,sys # Getting the absolute path from where", "user id: \") serverpasswd = raw_input(\"Please enter vRops password: \")", "\") serverpasswd = raw_input(\"Please enter vRops password: \") encryptedvar =", "keys_to_monitor = raw_input(\"Please enter the number of keys to monitor:", "# Getting the data for the config.json file final_data =", "to collect: \") keys_to_monitor = raw_input(\"Please enter the number of", "data[\"sampleno\"] = int(maxsamples) serverdetails = {} serverdetails[\"name\"] = servername serverdetails[\"userid\"]", "# Getting the path where config.json file should be kept", "kept path = get_script_path() 
fullpath = path+\"/\"+\"config.json\" # Getting the", "of keys to monitor: \") keys = [] for i", "collect: \") keys_to_monitor = raw_input(\"Please enter the number of keys", "get_the_inputs() # Saving the data to config.json file with open(fullpath,", "encryptedvar = base64.b64encode(serverpasswd) maxsamples = raw_input(\"Please enter the maximum number", "to monitor: \") keys = [] for i in range(int(keys_to_monitor)):", "1 data[\"adapterKind\"] = adapterkind data[\"resourceKind\"] = resourceKind data[\"sampleno\"] = int(maxsamples)", "IP/FQDN: \") serveruid = raw_input(\"Please enter user id: \") serverpasswd", "enter vRops password: \") encryptedvar = base64.b64encode(serverpasswd) maxsamples = raw_input(\"Please", "= {} serverdetails[\"name\"] = servername serverdetails[\"userid\"] = serveruid serverdetails[\"password\"] =", "\") servername = raw_input(\"Enter enter Server IP/FQDN: \") serveruid =", "json.dump(final_data, outfile, sort_keys = True, indent = 2, separators=(',', ':'),", "<<EMAIL>> # \"\"\" # Importing the required modules import json", "the number of keys to monitor: \") keys = []", "where the script is being run def get_script_path(): return os.path.dirname(os.path.realpath(sys.argv[0]))", "return os.path.dirname(os.path.realpath(sys.argv[0])) def get_the_inputs(): adapterkind = raw_input(\"Please enter Adapter Kind:", "= raw_input(\"Please enter the maximum number of samples to collect:", "adapterkind data[\"resourceKind\"] = resourceKind data[\"sampleno\"] = int(maxsamples) serverdetails = {}", "open(fullpath, 'w') as outfile: json.dump(final_data, outfile, sort_keys = True, indent", "as outfile: json.dump(final_data, outfile, sort_keys = True, indent = 2,", "raw_input(\"Please enter user id: \") serverpasswd = raw_input(\"Please enter vRops", "number of keys to monitor: \") keys = [] for", "from where the script is being run def get_script_path(): return", "samples to collect: \") keys_to_monitor = raw_input(\"Please enter the number", 
"raw_input(\"Please enter vRops password: \") encryptedvar = base64.b64encode(serverpasswd) maxsamples =", "= [] for i in range(int(keys_to_monitor)): keys.append(raw_input(\"Enter the key: \"))", "run def get_script_path(): return os.path.dirname(os.path.realpath(sys.argv[0])) def get_the_inputs(): adapterkind = raw_input(\"Please", "for the config.json file final_data = get_the_inputs() # Saving the", "{} serverdetails[\"name\"] = servername serverdetails[\"userid\"] = serveruid serverdetails[\"password\"] = encryptedvar", "the script is being run def get_script_path(): return os.path.dirname(os.path.realpath(sys.argv[0])) def", "enter Resource Kind: \") servername = raw_input(\"Enter enter Server IP/FQDN:", "serverdetails[\"name\"] = servername serverdetails[\"userid\"] = serveruid serverdetails[\"password\"] = encryptedvar data[\"server\"]", "servername = raw_input(\"Enter enter Server IP/FQDN: \") serveruid = raw_input(\"Please", "1: maxsamples = 1 data[\"adapterKind\"] = adapterkind data[\"resourceKind\"] = resourceKind", "Resource Kind: \") servername = raw_input(\"Enter enter Server IP/FQDN: \")", "'w') as outfile: json.dump(final_data, outfile, sort_keys = True, indent =", "environment for data-collect.py # data-collect.py contain the python program to", "python \"\"\" # # set-config - a small python program", "base64.b64encode(serverpasswd) maxsamples = raw_input(\"Please enter the maximum number of samples", "serverpasswd = raw_input(\"Please enter vRops password: \") encryptedvar = base64.b64encode(serverpasswd)", "int(maxsamples) serverdetails = {} serverdetails[\"name\"] = servername serverdetails[\"userid\"] = serveruid", "outfile: json.dump(final_data, outfile, sort_keys = True, indent = 2, separators=(',',", "\") resourceKind = raw_input(\"Please enter Resource Kind: \") servername =", "= adapterkind data[\"resourceKind\"] = resourceKind data[\"sampleno\"] = int(maxsamples) serverdetails =", "the absolute path from where the script is being run", 
"< 1: maxsamples = 1 data[\"adapterKind\"] = adapterkind data[\"resourceKind\"] =", "# data-collect.py contain the python program to gather Metrics from", "# Importing the required modules import json import base64 import", "file with open(fullpath, 'w') as outfile: json.dump(final_data, outfile, sort_keys =", "= 1 data[\"adapterKind\"] = adapterkind data[\"resourceKind\"] = resourceKind data[\"sampleno\"] =", "= raw_input(\"Please enter user id: \") serverpasswd = raw_input(\"Please enter", "enter Server IP/FQDN: \") serveruid = raw_input(\"Please enter user id:", "enter user id: \") serverpasswd = raw_input(\"Please enter vRops password:", "monitor: \") keys = [] for i in range(int(keys_to_monitor)): keys.append(raw_input(\"Enter", "serveruid = raw_input(\"Please enter user id: \") serverpasswd = raw_input(\"Please", "# Saving the data to config.json file with open(fullpath, 'w')", "\"\"\" # Importing the required modules import json import base64", "the maximum number of samples to collect: \") keys_to_monitor =", "config.json file final_data = get_the_inputs() # Saving the data to", "vRops password: \") encryptedvar = base64.b64encode(serverpasswd) maxsamples = raw_input(\"Please enter", "contain the python program to gather Metrics from vROps #", "!/usr/bin python \"\"\" # # set-config - a small python", "configuration environment for data-collect.py # data-collect.py contain the python program", "= base64.b64encode(serverpasswd) maxsamples = raw_input(\"Please enter the maximum number of", "raw_input(\"Please enter Adapter Kind: \") resourceKind = raw_input(\"Please enter Resource", "Author <NAME> <<EMAIL>> # \"\"\" # Importing the required modules", "Getting the path where config.json file should be kept path", "modules import json import base64 import os,sys # Getting the", "= encryptedvar data[\"server\"] = serverdetails data[\"keys\"] = keys return data", "program to setup the configuration environment for data-collect.py # data-collect.py", "password: 
\") encryptedvar = base64.b64encode(serverpasswd) maxsamples = raw_input(\"Please enter the", "# set-config - a small python program to setup the", "with open(fullpath, 'w') as outfile: json.dump(final_data, outfile, sort_keys = True,", "[] for i in range(int(keys_to_monitor)): keys.append(raw_input(\"Enter the key: \")) data", "outfile, sort_keys = True, indent = 2, separators=(',', ':'), ensure_ascii=False)", "raw_input(\"Please enter the number of keys to monitor: \") keys", "where config.json file should be kept path = get_script_path() fullpath", "a small python program to setup the configuration environment for", "\") keys = [] for i in range(int(keys_to_monitor)): keys.append(raw_input(\"Enter the", "= path+\"/\"+\"config.json\" # Getting the data for the config.json file", "{} if int(maxsamples) < 1: maxsamples = 1 data[\"adapterKind\"] =", "# Getting the absolute path from where the script is", "required modules import json import base64 import os,sys # Getting", "final_data = get_the_inputs() # Saving the data to config.json file", "# Author <NAME> <<EMAIL>> # \"\"\" # Importing the required", "program to gather Metrics from vROps # Author <NAME> <<EMAIL>>", "raw_input(\"Please enter Resource Kind: \") servername = raw_input(\"Enter enter Server", "serverdetails = {} serverdetails[\"name\"] = servername serverdetails[\"userid\"] = serveruid serverdetails[\"password\"]", "number of samples to collect: \") keys_to_monitor = raw_input(\"Please enter", "data for the config.json file final_data = get_the_inputs() # Saving", "= get_script_path() fullpath = path+\"/\"+\"config.json\" # Getting the data for", "# !/usr/bin python \"\"\" # # set-config - a small", "resourceKind data[\"sampleno\"] = int(maxsamples) serverdetails = {} serverdetails[\"name\"] = servername", "data to config.json file with open(fullpath, 'w') as outfile: json.dump(final_data,", "# # set-config - a small python program to setup", "Getting the data for the config.json file final_data = 
get_the_inputs()", "\") encryptedvar = base64.b64encode(serverpasswd) maxsamples = raw_input(\"Please enter the maximum", "os,sys # Getting the absolute path from where the script", "path = get_script_path() fullpath = path+\"/\"+\"config.json\" # Getting the data", "<NAME> <<EMAIL>> # \"\"\" # Importing the required modules import", "Metrics from vROps # Author <NAME> <<EMAIL>> # \"\"\" #", "Server IP/FQDN: \") serveruid = raw_input(\"Please enter user id: \")", "maximum number of samples to collect: \") keys_to_monitor = raw_input(\"Please", "data = {} if int(maxsamples) < 1: maxsamples = 1", "gather Metrics from vROps # Author <NAME> <<EMAIL>> # \"\"\"", "to config.json file with open(fullpath, 'w') as outfile: json.dump(final_data, outfile,", "from vROps # Author <NAME> <<EMAIL>> # \"\"\" # Importing", "= {} if int(maxsamples) < 1: maxsamples = 1 data[\"adapterKind\"]", "data-collect.py contain the python program to gather Metrics from vROps", "of samples to collect: \") keys_to_monitor = raw_input(\"Please enter the", "json import base64 import os,sys # Getting the absolute path", "config.json file should be kept path = get_script_path() fullpath =", "int(maxsamples) < 1: maxsamples = 1 data[\"adapterKind\"] = adapterkind data[\"resourceKind\"]", "= raw_input(\"Please enter vRops password: \") encryptedvar = base64.b64encode(serverpasswd) maxsamples", "the required modules import json import base64 import os,sys #", "the data to config.json file with open(fullpath, 'w') as outfile:", "\")) data = {} if int(maxsamples) < 1: maxsamples =", "file final_data = get_the_inputs() # Saving the data to config.json", "get_script_path() fullpath = path+\"/\"+\"config.json\" # Getting the data for the", "if int(maxsamples) < 1: maxsamples = 1 data[\"adapterKind\"] = adapterkind", "data[\"keys\"] = keys return data # Getting the path where", "import base64 import os,sys # Getting the absolute path from", "keys = [] for i in range(int(keys_to_monitor)): 
keys.append(raw_input(\"Enter the key:", "config.json file with open(fullpath, 'w') as outfile: json.dump(final_data, outfile, sort_keys", "script is being run def get_script_path(): return os.path.dirname(os.path.realpath(sys.argv[0])) def get_the_inputs():", "maxsamples = 1 data[\"adapterKind\"] = adapterkind data[\"resourceKind\"] = resourceKind data[\"sampleno\"]", "= resourceKind data[\"sampleno\"] = int(maxsamples) serverdetails = {} serverdetails[\"name\"] =", "to setup the configuration environment for data-collect.py # data-collect.py contain", "serverdetails[\"userid\"] = serveruid serverdetails[\"password\"] = encryptedvar data[\"server\"] = serverdetails data[\"keys\"]", "\") serveruid = raw_input(\"Please enter user id: \") serverpasswd =", "maxsamples = raw_input(\"Please enter the maximum number of samples to", "file should be kept path = get_script_path() fullpath = path+\"/\"+\"config.json\"", "= get_the_inputs() # Saving the data to config.json file with", "resourceKind = raw_input(\"Please enter Resource Kind: \") servername = raw_input(\"Enter", "set-config - a small python program to setup the configuration", "serverdetails data[\"keys\"] = keys return data # Getting the path", "servername serverdetails[\"userid\"] = serveruid serverdetails[\"password\"] = encryptedvar data[\"server\"] = serverdetails", "import os,sys # Getting the absolute path from where the", "= raw_input(\"Please enter the number of keys to monitor: \")", "serveruid serverdetails[\"password\"] = encryptedvar data[\"server\"] = serverdetails data[\"keys\"] = keys", "# \"\"\" # Importing the required modules import json import", "the path where config.json file should be kept path =", "to gather Metrics from vROps # Author <NAME> <<EMAIL>> #", "being run def get_script_path(): return os.path.dirname(os.path.realpath(sys.argv[0])) def get_the_inputs(): adapterkind =", "for i in range(int(keys_to_monitor)): keys.append(raw_input(\"Enter the key: \")) data =", "data-collect.py # 
data-collect.py contain the python program to gather Metrics", "= raw_input(\"Enter enter Server IP/FQDN: \") serveruid = raw_input(\"Please enter", "os.path.dirname(os.path.realpath(sys.argv[0])) def get_the_inputs(): adapterkind = raw_input(\"Please enter Adapter Kind: \")", "i in range(int(keys_to_monitor)): keys.append(raw_input(\"Enter the key: \")) data = {}", "the data for the config.json file final_data = get_the_inputs() #", "= raw_input(\"Please enter Adapter Kind: \") resourceKind = raw_input(\"Please enter", "enter the maximum number of samples to collect: \") keys_to_monitor", "Importing the required modules import json import base64 import os,sys", "= keys return data # Getting the path where config.json", "absolute path from where the script is being run def", "return data # Getting the path where config.json file should", "= serveruid serverdetails[\"password\"] = encryptedvar data[\"server\"] = serverdetails data[\"keys\"] =", "Saving the data to config.json file with open(fullpath, 'w') as", "= servername serverdetails[\"userid\"] = serveruid serverdetails[\"password\"] = encryptedvar data[\"server\"] =", "is being run def get_script_path(): return os.path.dirname(os.path.realpath(sys.argv[0])) def get_the_inputs(): adapterkind", "python program to setup the configuration environment for data-collect.py #", "the configuration environment for data-collect.py # data-collect.py contain the python", "Getting the absolute path from where the script is being", "\"\"\" # # set-config - a small python program to", "the config.json file final_data = get_the_inputs() # Saving the data", "the key: \")) data = {} if int(maxsamples) < 1:", "vROps # Author <NAME> <<EMAIL>> # \"\"\" # Importing the", "serverdetails[\"password\"] = encryptedvar data[\"server\"] = serverdetails data[\"keys\"] = keys return", "id: \") serverpasswd = raw_input(\"Please enter vRops password: \") encryptedvar", "data[\"resourceKind\"] = resourceKind data[\"sampleno\"] = 
int(maxsamples) serverdetails = {} serverdetails[\"name\"]", "def get_script_path(): return os.path.dirname(os.path.realpath(sys.argv[0])) def get_the_inputs(): adapterkind = raw_input(\"Please enter", "key: \")) data = {} if int(maxsamples) < 1: maxsamples", "for data-collect.py # data-collect.py contain the python program to gather", "= serverdetails data[\"keys\"] = keys return data # Getting the", "path where config.json file should be kept path = get_script_path()", "data # Getting the path where config.json file should be", "\") keys_to_monitor = raw_input(\"Please enter the number of keys to", "python program to gather Metrics from vROps # Author <NAME>", "import json import base64 import os,sys # Getting the absolute", "setup the configuration environment for data-collect.py # data-collect.py contain the", "- a small python program to setup the configuration environment", "= raw_input(\"Please enter Resource Kind: \") servername = raw_input(\"Enter enter", "get_the_inputs(): adapterkind = raw_input(\"Please enter Adapter Kind: \") resourceKind =", "enter the number of keys to monitor: \") keys =", "keys to monitor: \") keys = [] for i in", "path from where the script is being run def get_script_path():", "raw_input(\"Enter enter Server IP/FQDN: \") serveruid = raw_input(\"Please enter user", "def get_the_inputs(): adapterkind = raw_input(\"Please enter Adapter Kind: \") resourceKind", "be kept path = get_script_path() fullpath = path+\"/\"+\"config.json\" # Getting", "keys.append(raw_input(\"Enter the key: \")) data = {} if int(maxsamples) <", "encryptedvar data[\"server\"] = serverdetails data[\"keys\"] = keys return data #", "should be kept path = get_script_path() fullpath = path+\"/\"+\"config.json\" #", "adapterkind = raw_input(\"Please enter Adapter Kind: \") resourceKind = raw_input(\"Please", "Adapter Kind: \") resourceKind = raw_input(\"Please enter Resource Kind: \")" ]
[ "snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr end implement Test using", "we are faking it \"\"\" env = {const.ENVIRON_FORCE_TTY: \"true\"} return", "regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\") (args,", "in TTY-based terminal into uncolored characters. As such, this package", "not os.path.isdir(non_existing_dir) (args, _) = get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir) (stdout, _,", "from subprocess import TimeoutExpired from threading import Timer import pytest", "= os.path.join(log_dir, log_file) with open(log_file, \"r\") as f: log_lines =", "compiled_regex = re.compile(regex) result.append(compiled_regex) return result def is_colorama_package_available(): try: import", "def test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir = os.path.join(tmpdir, \"non_existing_dir\") assert not os.path.isdir(non_existing_dir) (args,", "+ err assert \"Starting compile\" in all_output assert \"Compile done\"", "Endpoint\"], ), ( 3, False, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "tmp_dir, stdout_log_level=None, log_file=None, log_level_log_file=None, timed=False, dbport=None, dbname=\"inmanta\", dbhost=None, dbuser=None, dbpass=<PASSWORD>,", "timed=False, dbport=None, dbname=\"inmanta\", dbhost=None, dbuser=None, dbpass=<PASSWORD>, config_dir=None, server_extensions=[], version=False, ):", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Server Rest Endpoint\", ], [], ), ( 2, True, True,", "\"log\") state_dir = os.path.join(root_dir, \"data\") for directory in [log_dir, state_dir]:", "run_without_tty(args) assert log_file in os.listdir(log_dir) log_file = os.path.join(log_dir, log_file) with", "\"\"\" entity Test: number attr end implement Test using std::none", "\\x1b\\[34mStarting Server Rest 
Endpoint\", ], [], ), ( 2, True,", "ANY KIND, either express or implied. See the License for", "r\"\\s*\\* core:\"]), (True, False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler version:", "\\x1b\\[34mStarting Server Rest Endpoint\"], ), ( 3, True, False, [r\"[a-z.]*[", "f.write(\"log-dir=\" + log_dir + \"\\n\") f.write(\"state-dir=\" + state_dir + \"\\n\")", "False return True def test_verify_that_colorama_package_is_not_present(): \"\"\" The colorama package turns", "assert log_file not in os.listdir(log_dir) assert len(stdout) != 0 check_logs(stdout,", "= os.path.join(os.path.dirname(__file__), \"data\", \"bad_module_path\") (stdout, stderr, code) = run_without_tty(args, env={\"PYTHONPATH\":", "f: log_lines = f.readlines() check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed=True) check_logs(stdout, [],", "in text.decode(\"ascii\").split(\"\\n\") if line != \"\"] def do_kill(process, killtime=3, termtime=2):", "process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv) return process def", "endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ), ( 3, False,", "this plugin is broken\" ) in stdout assert code ==", "r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), ], ) @pytest.mark.timeout(20) def", "log_dir) def do_run(args, env={}, cwd=None): baseenv = os.environ.copy() baseenv.update(env) process", "_, _) = run_without_tty(args) assert log_file in os.listdir(log_dir) log_file =", "log lines: %s\" % (regex.pattern,)) for regex in compiled_regexes_forbidden_lines: if", "if \"-X\" in cmd: assert \"inmanta.ast.TypeNotFoundException: could not find type", "import pytest import inmanta.util from inmanta import const def get_command(", "convert_to_ascii(text): return [line for line in text.decode(\"ascii\").split(\"\\n\") if line !=", "log_level_log_file=None, timed=False, dbport=None, dbname=\"inmanta\", dbhost=None, dbuser=None, dbpass=<PASSWORD>, 
config_dir=None, server_extensions=[], version=False,", "stdout @pytest.mark.timeout(20) def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir): non_existing_config_file = os.path.join(tmpdir, \"non_existing_config_file\") snippetcompiler.setup_for_snippet(", "]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\", ], [], ), ( 2, False,", "nuber in namespace\" not in str(err) @pytest.mark.timeout(20) def test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir", "dbuser: f.write(f\"username={dbuser}\\n\") if dbpass: f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\") f.write(f\"enabled_extensions={', '.join(server_extensions)}\\n\") args =", "check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed): compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed) compiled_regexes_forbidden_lines =", "Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"], []), (False,", "regexes_forbidden_lines): (args, log_dir) = get_command(tmpdir, version=version_should_be_shown) if with_tty: (stdout, _,", "_) = run_without_tty(args) log_file = \"server.log\" assert log_file not in", "lines: %s\" % (regex.pattern,)) for regex in compiled_regexes_forbidden_lines: if any(regex.match(line)", "]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), ( 3, True, [ r\"[a-z.]*[", "f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\") f.write(f\"enabled_extensions={', '.join(server_extensions)}\\n\") args = [sys.executable, \"-m\", \"inmanta.app\"] if", "[sys.executable, \"-m\", \"inmanta.app\", \"project\", \"init\", \"-n\", \"test-project\", \"-o\", tmpdir, \"--default\"]", "colorama # noqa: F401 except ModuleNotFoundError: return False return True", "_) = run_without_tty(args) stdout = \"\".join(stdout) assert \"Starting server endpoint\"", "do_run([sys.executable, \"-m\", \"inmanta.app\"] + list(cmd), cwd=snippetcompiler.project_dir) out, err = 
process.communicate(timeout=30)", "in os.listdir(log_dir) assert len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed)", "log_dir = os.path.join(root_dir, \"log\") state_dir = os.path.join(root_dir, \"data\") for directory", "was not found in log lines: %s\" % (regex.pattern,)) for", "non_existing_dir = os.path.join(tmpdir, \"non_existing_dir\") assert not os.path.isdir(non_existing_dir) (args, _) =", "msg): def w(): print(msg) func() return w t1 = Timer(killtime,", "so we are faking it \"\"\" env = {const.ENVIRON_FORCE_TTY: \"true\"}", "[], regexes_required_lines, timed=False) def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed): compiled_regexes_requires_lines =", "not get code for actual tty to run stable in", "True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting", "in out assert \"STOP\" in out assert \"SHUTDOWN COMPLETE\" in", "value '1234', expected Number (reported in Construct(Test) (./main.cf:8)) \"\"\" )", "doesn't exist\" in all_output @pytest.mark.parametrize_any( \"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines\", [", "permissions and limitations under the License. 
Contact: <EMAIL> \"\"\" import", "termtime=10) assert len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False) def", "test_minus_x_option(snippetcompiler, cmd): snippetcompiler.setup_for_snippet( \"\"\" entity Test: nuber attr end \"\"\"", "Timer(termtime, do_and_log(process.terminate, \"terminated process\")) t1.start() t2.start() out, err = process.communicate()", "args += [\"-c\", config_file, \"server\"] return (args, log_dir) def do_run(args,", "0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed) @pytest.mark.parametrize_any( \"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines\",", "process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\")]) # wait for handler to", "See the License for the specific language governing permissions and", "@pytest.mark.parametrize_any( \"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines\", [ (False, True, [r\"Inmanta Service", "(stdout, _, _) = run_without_tty(args) assert log_file in os.listdir(log_dir) log_file", "[sys.executable, \"-m\", \"inmanta.app\"] if stdout_log_level: args.append(\"-\" + \"v\" * stdout_log_level)", "= do_run(args, env) return do_kill(process, killtime, termtime) def run_with_tty(args, killtime=3,", "), ( 2, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[", "= run_with_tty(args, killtime=15, termtime=10) else: (stdout, _, _) = run_without_tty(args,", "dbuser=None, dbpass=<PASSWORD>, config_dir=None, server_extensions=[], version=False, ): root_dir = tmp_dir.mkdir(\"root\").strpath log_dir", "(stdout, stderr, code) = run_without_tty(args, env={\"PYTHONPATH\": pp + \":\" +", "regexes_forbidden_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=False)", "on instance `__config__::Test (instantiated at ./main.cf:8)` \"\"\" \"\"\"(reported in Construct(Test)", 
"all_output = out + err assert \"Starting compile\" in all_output", "= get_command( tmpdir, dbport=postgres_db.port, dbname=database_name, dbhost=postgres_db.host, dbuser=postgres_db.user, dbpass=<PASSWORD>, server_extensions=[\"badplugin\"], )", "\"Compile done\" in all_output assert f\"Config file {non_existing_config_file} doesn't exist\"", "True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest", ") def test_minus_x_option(snippetcompiler, cmd): snippetcompiler.setup_for_snippet( \"\"\" entity Test: nuber attr", "import TimeoutExpired from threading import Timer import pytest import inmanta.util", "Version 2.0 (the \"License\"); you may not use this file", "process = do_run(args, env) return do_kill(process, killtime, termtime) def run_with_tty(args,", "regexes_required_lines, timed=False) def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed): compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines,", "+= [\"--timed-logs\"] if config_dir: args += [\"--config-dir\", config_dir] if version:", "\"project\", \"init\", \"-n\", \"test-project\", \"-o\", tmpdir, \"--default\"] (stdout, stderr, return_code)", "specific language governing permissions and limitations under the License. 
Contact:", "\"inmanta.app\"] + cmd, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert out.decode()", "not use this file except in compliance with the License.", "(stdout, _, _) = run_with_tty(args, killtime=15, termtime=10) else: (stdout, _,", "killtime, termtime) def run_with_tty(args, killtime=3, termtime=2): \"\"\"Could not get code", "= run_without_tty(args, killtime=15, termtime=10) assert len(stdout) != 0 check_logs(stdout, regexes_required_lines,", "% (regex.pattern,)) def test_check_shutdown(): process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\")]) #", "you may not use this file except in compliance with", "process.returncode == 0 out = out.decode() err = err.decode() all_output", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "the License. You may obtain a copy of the License", "could not find type nuber in namespace\" in str(err) else:", "os.path.join(tmpdir, \"non_existing_config_file\") snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr end \"\"\"", "+= [\"--log-file\", log_file] if log_file and log_level_log_file: args += [\"--log-file-level\",", "log_file = \"server.log\" (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level)", "assert len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False) def test_init_project(tmpdir):", "dbport else: port = inmanta.util.get_free_tcp_port() with open(config_file, \"w+\", encoding=\"utf-8\") as", "\"Slice badplugin.badslice failed to start because: Too bad, this plugin", "os.path.join(log_dir, log_file) args += [\"--log-file\", log_file] if log_file and log_level_log_file:", "print([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"])", "\"\"\" assert not is_colorama_package_available() 
@pytest.mark.parametrize_any( \"log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines\",", "log_lines = f.readlines() check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed=True) check_logs(stdout, [], regexes_required_lines,", "\"inmanta ERROR Server setup failed\" in stdout assert ( \"inmanta.server.protocol.SliceStartupException:", "(args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level) if with_tty: (stdout,", "[\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def get_compiled_regexes(regexes, timed): result = [] for regex in", "do_run(args, env={}, cwd=None): baseenv = os.environ.copy() baseenv.update(env) process = subprocess.Popen(args,", "state_dir]: os.mkdir(directory) config_file = os.path.join(root_dir, \"inmanta.cfg\") if dbport is not", "test_no_log_file_set(tmpdir, log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty:", "do_kill(process, killtime=3, termtime=1) print(out, err) assert code == 0 assert", "= [sys.executable, \"-m\", \"inmanta.app\", \"project\", \"init\", \"-n\", \"test-project\", \"-o\", tmpdir,", ") in stdout assert code == 4 def test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet(", "err assert \"Starting compile\" in all_output assert \"Compile done\" in", "check_logs(stdout, [], regexes_required_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=False) def check_logs(log_lines,", "in log_lines: print(line) for regex in compiled_regexes_requires_lines: if not any(regex.match(line)", "\"inmanta.app\"] if stdout_log_level: args.append(\"-\" + \"v\" * stdout_log_level) if log_file:", "os.path.join(root_dir, \"inmanta.cfg\") if dbport is not None: port = dbport", "\"\" if \"-X\" in cmd: assert \"inmanta.ast.TypeNotFoundException: could not find", "= \"\".join(stdout) assert \"Starting server endpoint\" in stdout assert 
f\"Config", "Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), ], ) @pytest.mark.timeout(20)", "with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines): (args, log_dir) = get_command(tmpdir, version=version_should_be_shown) if", "for line in log_lines): pytest.fail(\"Required pattern was not found in", "[]), (False, False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler version: \",", "language governing permissions and limitations under the License. Contact: <EMAIL>", "in Construct(Test) (./main.cf:8)) caused by: Invalid value '1234', expected Number", "out assert not err def test_startup_failure(tmpdir, postgres_db, database_name): (args, log_dir)", "law or agreed to in writing, software distributed under the", "\"data\") for directory in [log_dir, state_dir]: os.mkdir(directory) config_file = os.path.join(root_dir,", "\"miniapp.py\"), \"bad\"]) out, err, code = do_kill(process, killtime=5, termtime=2) print(out,", "args.append(\"-\" + \"v\" * stdout_log_level) if log_file: log_file = os.path.join(log_dir,", "f.write(\"port=\" + str(port) + \"\\n\") f.write(\"name=\" + dbname + \"\\n\")", "+ list(cmd), cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert out.decode() ==", "= process.communicate(timeout=30) assert out.decode() == \"\" assert err.decode() == output", "Thread Dump ----\" in out assert \"STOP\" in out assert", "\"-m\", \"inmanta.app\"] + cmd, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert", "\"bad_module_path\") (stdout, stderr, code) = run_without_tty(args, env={\"PYTHONPATH\": pp + \":\"", "Rest Endpoint\"], ), ( 3, False, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m", "[ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server", "regex in regexes: if timed: regex = get_timestamp_regex() + \"", "stderr, code) = 
run_without_tty(args, env={\"PYTHONPATH\": pp + \":\" + extrapath},", "stderr, return_code) = run_without_tty(args, killtime=15, termtime=10) test_project_path = os.path.join(tmpdir, \"test-project\")", "database_name): (args, log_dir) = get_command( tmpdir, dbport=postgres_db.port, dbname=database_name, dbhost=postgres_db.host, dbuser=postgres_db.user,", "do_kill(process, killtime, termtime) def run_with_tty(args, killtime=3, termtime=2): \"\"\"Could not get", "== 3 assert \"----- Thread Dump ----\" in out assert", "= os.path.join(log_dir, log_file) args += [\"--log-file\", log_file] if log_file and", "assert \"STOP\" not in out assert \"SHUTDOWN COMPLETE\" not in", "subprocess import sys from subprocess import TimeoutExpired from threading import", "_) = run_with_tty(args) else: (stdout, _, _) = run_without_tty(args) log_file", "\"-o\", tmpdir, \"--default\"] (stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10)", "t2.start() out, err = process.communicate() t1.cancel() t2.cancel() stdout = convert_to_ascii(out)", "\"-m\", \"inmanta.app\"] + config_options + [\"compile\"] process = do_run(args, cwd=snippetcompiler.project_dir)", "log_file] if log_file and log_level_log_file: args += [\"--log-file-level\", str(log_level_log_file)] if", "pp + \":\" + extrapath}, killtime=15, termtime=10) assert \"inmanta ERROR", "attr end implement Test using std::none o = Test(attr=\"1234\") \"\"\"", "\"-m\", \"inmanta.app\"] if stdout_log_level: args.append(\"-\" + \"v\" * stdout_log_level) if", "Construct(Test) (./main.cf:8)) \"\"\" ) def exec(*cmd): process = do_run([sys.executable, \"-m\",", "+ extrapath}, killtime=15, termtime=10) assert \"inmanta ERROR Server setup failed\"", "assert not os.path.isdir(non_existing_dir) (args, _) = get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir) (stdout,", "termtime=2) print(out, err) assert code == 3 assert \"----- Thread", "using std::none o = Test(attr=\"1234\") \"\"\" ) output = (", "( 3, 
True, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\",", "server endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\", ], [],", "out assert \"SHUTDOWN COMPLETE\" not in out assert not err", "(args, log_dir) = get_command( tmpdir, dbport=postgres_db.port, dbname=database_name, dbhost=postgres_db.host, dbuser=postgres_db.user, dbpass=<PASSWORD>,", "out, err = process.communicate(timeout=30) assert out.decode() == \"\" if \"-X\"", "[ (False, True, [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\",", "err.decode() all_output = out + err assert \"Starting compile\" in", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "%s\" % (regex.pattern,)) def test_check_shutdown(): process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\")])", "= [\"-c\", non_existing_config_file, \"-vvv\"] args = [sys.executable, \"-m\", \"inmanta.app\"] +", "termtime=termtime) def get_timestamp_regex(): return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def get_compiled_regexes(regexes, timed): result", "regex compiled_regex = re.compile(regex) result.append(compiled_regex) return result def is_colorama_package_available(): try:", "regexes_required_lines, regexes_forbidden_lines, timed): compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed) compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines,", "\"data\", \"bad_module_path\") (stdout, stderr, code) = run_without_tty(args, env={\"PYTHONPATH\": pp +", "inmanta.util.get_free_tcp_port() with open(config_file, \"w+\", encoding=\"utf-8\") as f: f.write(\"[config]\\n\") f.write(\"log-dir=\" +", "cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv) return process def convert_to_ascii(text): return [line", "= process.communicate() t1.cancel() 
t2.cancel() stdout = convert_to_ascii(out) stderr = convert_to_ascii(err)", "compiled_regexes_forbidden_lines: if any(regex.match(line) for line in log_lines): pytest.fail(\"Forbidden pattern found", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "regex in compiled_regexes_requires_lines: if not any(regex.match(line) for line in log_lines):", "port = inmanta.util.get_free_tcp_port() with open(config_file, \"w+\", encoding=\"utf-8\") as f: f.write(\"[config]\\n\")", "`attr` on instance `__config__::Test (instantiated at ./main.cf:8)` \"\"\" \"\"\"(reported in", "found in log lines: %s\" % (regex.pattern,)) for regex in", "if timed: args += [\"--timed-logs\"] if config_dir: args += [\"--config-dir\",", "\"Starting compile\" in all_output assert \"Compile done\" in all_output assert", "by: Invalid value '1234', expected Number (reported in Construct(Test) (./main.cf:8))", "any(regex.match(line) for line in log_lines): pytest.fail(\"Forbidden pattern found in log", "== \"\" if \"-X\" in cmd: assert \"inmanta.ast.TypeNotFoundException: could not", "@pytest.mark.parametrize_any( \"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False, [", "False, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest", "\"\"\" ) def exec(*cmd): process = do_run([sys.executable, \"-m\", \"inmanta.app\"] +", "could not find type nuber in namespace\" not in str(err)", "pytest.fail(\"Forbidden pattern found in log lines: %s\" % (regex.pattern,)) def", "log_file and log_level_log_file: args += [\"--log-file-level\", str(log_level_log_file)] if timed: args", "all_output assert f\"Config file {non_existing_config_file} doesn't exist\" in all_output @pytest.mark.parametrize_any(", "( 2, True, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server", "\"--default\"] (stdout, stderr, 
return_code) = run_without_tty(args, killtime=15, termtime=10) test_project_path =", "else: (stdout, _, _) = run_without_tty(args, killtime=15, termtime=10) assert len(stdout)", "\"----- Thread Dump ----\" in out assert \"STOP\" in out", "test_startup_failure(tmpdir, postgres_db, database_name): (args, log_dir) = get_command( tmpdir, dbport=postgres_db.port, dbname=database_name,", "and with_tty: pytest.skip(\"Colorama is present\") log_file = \"server.log\" (args, log_dir)", "text.decode(\"ascii\").split(\"\\n\") if line != \"\"] def do_kill(process, killtime=3, termtime=2): def", "pytest.skip(\"Colorama is present\") log_file = \"server.log\" (args, log_dir) = get_command(tmpdir,", "[ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\",", "except in compliance with the License. You may obtain a", "0 assert len(stderr) == 1 assert \"already exists\" in stderr[0]", "return (args, log_dir) def do_run(args, env={}, cwd=None): baseenv = os.environ.copy()", "to be in place try: process.communicate(timeout=2) except TimeoutExpired: pass process.send_signal(signal.SIGUSR1)", "in out def test_check_bad_shutdown(): print([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) process =", "'1234', expected Number (reported in Construct(Test) (./main.cf:8)) \"\"\" ) def", "open(log_file, \"r\") as f: log_lines = f.readlines() check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines,", "process.communicate(timeout=30) assert out.decode() == \"\" assert err.decode() == output exec(\"compile\")", "badplugin.badslice failed to start because: Too bad, this plugin is", "in all_output assert \"Compile done\" in all_output assert f\"Config file", "def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir): non_existing_config_file = os.path.join(tmpdir, \"non_existing_config_file\") snippetcompiler.setup_for_snippet( \"\"\" entity", 
"get_compiled_regexes(regexes_required_lines, timed) compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed) for line in log_lines:", "caused by: Invalid value '1234', expected Number (reported in Construct(Test)", "assert code == 3 assert \"----- Thread Dump ----\" in", "{non_existing_config_file} doesn't exist\" in all_output @pytest.mark.parametrize_any( \"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines\",", ") def exec(*cmd): process = do_run([sys.executable, \"-m\", \"inmanta.app\"] + list(cmd),", "= do_run([sys.executable, \"-m\", \"inmanta.app\"] + list(cmd), cwd=snippetcompiler.project_dir) out, err =", "\"inmanta.cfg\") if dbport is not None: port = dbport else:", "[line for line in text.decode(\"ascii\").split(\"\\n\") if line != \"\"] def", "+ \"\\n\") f.write(\"state-dir=\" + state_dir + \"\\n\") f.write(\"[database]\\n\") f.write(\"port=\" +", "server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), ( 3,", "@pytest.mark.timeout(20) def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines): (args, log_dir) =", "code for actual tty to run stable in docker, so", "if any(regex.match(line) for line in log_lines): pytest.fail(\"Forbidden pattern found in", "[\"--log-file\", log_file] if log_file and log_level_log_file: args += [\"--log-file-level\", str(log_level_log_file)]", "( 2, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting", "Unless required by applicable law or agreed to in writing,", "not err def test_startup_failure(tmpdir, postgres_db, database_name): (args, log_dir) = get_command(", "args += [\"--log-file-level\", str(log_level_log_file)] if timed: args += [\"--timed-logs\"] if", "[r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest 
Endpoint\"], ),", "os.path.exists(test_project_path) (stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10) assert return_code", "), ( 2, False, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"],", "express or implied. See the License for the specific language", "timed=True) check_logs(stdout, [], regexes_required_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=False) def", "cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert process.returncode == 0 out", "killtime=15, termtime=10) assert len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False)", "print(msg) func() return w t1 = Timer(killtime, do_and_log(process.kill, \"killed process\"))", ") @pytest.mark.timeout(20) def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines): (args, log_dir)", "do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\")]) # wait for handler to be in", "`__config__::Test (instantiated at ./main.cf:8)` \"\"\" \"\"\"(reported in Construct(Test) (./main.cf:8)) caused", "\"\"\" \"\"\"(reported in Construct(Test) (./main.cf:8)) caused by: Invalid value '1234',", "len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed) @pytest.mark.parametrize_any( \"log_level, with_tty,", "{non_existing_dir} doesn't exist\" in stdout @pytest.mark.timeout(20) def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir): non_existing_config_file", "Rest Endpoint\"], ), ( 3, True, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server", "( 3, False, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting", "version: \", r\"Extensions:\", r\"\\s*\\* core:\"], []), (False, False, [], [r\"Inmanta", "directory {non_existing_dir} doesn't exist\" in stdout 
@pytest.mark.timeout(20) def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir):", "return_code) = run_without_tty(args, killtime=15, termtime=10) assert return_code != 0 assert", "= Test(attr=\"1234\") \"\"\" ) output = ( \"\"\"Could not set", "timed) compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed) for line in log_lines: print(line)", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "line in log_lines): pytest.fail(\"Forbidden pattern found in log lines: %s\"", "], [], ), ( 2, True, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"],", "with open(config_file, \"w+\", encoding=\"utf-8\") as f: f.write(\"[config]\\n\") f.write(\"log-dir=\" + log_dir", "Server Rest Endpoint\", ], [], ), ( 2, True, [r\"[a-z.]*[", "\"non_existing_config_file\") snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr end \"\"\" )", "2, False, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m", "[\"--timed-logs\"] if config_dir: args += [\"--config-dir\", config_dir] if version: args", "[r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], [],", "f: f.write(\"[config]\\n\") f.write(\"log-dir=\" + log_dir + \"\\n\") f.write(\"state-dir=\" + state_dir", "+= [\"--log-file-level\", str(log_level_log_file)] if timed: args += [\"--timed-logs\"] if config_dir:", "return False return True def test_verify_that_colorama_package_is_not_present(): \"\"\" The colorama package", "\"test-project\") assert return_code == 0 assert os.path.exists(test_project_path) (stdout, stderr, return_code)", "----\" in out assert \"STOP\" in out assert \"SHUTDOWN COMPLETE\"", "(args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, timed=timed) if with_tty: (stdout, _,", "Orchestrator\", r\"Compiler version: \", r\"Extensions:\", 
r\"\\s*\\* core:\"], []), (False, False,", "Dump ----\" in out assert \"STOP\" not in out assert", "\"SHUTDOWN COMPLETE\" not in out assert not err def test_startup_failure(tmpdir,", "= do_kill(process, killtime=3, termtime=1) print(out, err) assert code == 0", "err def test_startup_failure(tmpdir, postgres_db, database_name): (args, log_dir) = get_command( tmpdir,", "with_tty: pytest.skip(\"Colorama is present\") (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, timed=timed)", "killtime=15, termtime=10) else: (stdout, _, _) = run_without_tty(args, killtime=15, termtime=10)", "endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\", ], [], ), (", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "result.append(compiled_regex) return result def is_colorama_package_available(): try: import colorama # noqa:", "process = do_run(args, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert process.returncode", "the specific language governing permissions and limitations under the License.", "run_with_tty(args, killtime=15, termtime=10) else: (stdout, _, _) = run_without_tty(args, killtime=15,", "convert_to_ascii(err) return (stdout, stderr, process.returncode) def run_without_tty(args, env={}, killtime=3, termtime=2):", "threading import Timer import pytest import inmanta.util from inmanta import", "or agreed to in writing, software distributed under the License", "regexes_required_lines, regexes_forbidden_lines): (args, log_dir) = get_command(tmpdir, version=version_should_be_shown) if with_tty: (stdout,", "check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=True) check_logs(stdout, [],", "with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting", "_, _) = run_without_tty(args) stdout = \"\".join(stdout) assert \"Starting server", 
"return (stdout, stderr, process.returncode) def run_without_tty(args, env={}, killtime=3, termtime=2): process", "dbname + \"\\n\") if dbhost: f.write(f\"host={dbhost}\\n\") if dbuser: f.write(f\"username={dbuser}\\n\") if", "test_verify_that_colorama_package_is_not_present(): \"\"\" The colorama package turns the colored characters in", "test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir): non_existing_config_file = os.path.join(tmpdir, \"non_existing_config_file\") snippetcompiler.setup_for_snippet( \"\"\" entity Test:", "@pytest.mark.parametrize_any( \"cmd\", [([\"-X\", \"compile\"]), ([\"compile\", \"-X\"]), ([\"compile\"]), ([\"export\", \"-X\"]), ([\"-X\",", "== 0 out = out.decode() err = err.decode() all_output =", "\" \"Slice badplugin.badslice failed to start because: Too bad, this", "Server Rest Endpoint\", ], [], ), ( 2, False, True,", "./main.cf:8)` \"\"\" \"\"\"(reported in Construct(Test) (./main.cf:8)) caused by: Invalid value", "_, _) = run_with_tty(args, killtime=15, termtime=10) else: (stdout, _, _)", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "regexes_forbidden_lines, timed): compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed) compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed)", "= get_compiled_regexes(regexes_forbidden_lines, timed) for line in log_lines: print(line) for regex", "return do_kill(process, killtime, termtime) def run_with_tty(args, killtime=3, termtime=2): \"\"\"Could not", "Server Rest Endpoint\"], ), ( 3, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting", "= get_command(tmpdir, stdout_log_level=log_level, timed=timed) if with_tty: (stdout, _, _) =", "<reponame>inmanta/inmanta-core \"\"\" Copyright 2018 Inmanta Licensed under the Apache License,", "Rest Endpoint\"], [], ), ( 2, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting", "do_run([sys.executable, os.path.join(os.path.dirname(__file__), 
\"miniapp.py\"), \"bad\"]) out, err, code = do_kill(process, killtime=5,", "\\x1b\\[34mStarting Server Rest Endpoint\"], ), ], ) @pytest.mark.timeout(20) def test_no_log_file_set(tmpdir,", "TimeoutExpired: pass process.send_signal(signal.SIGUSR1) out, err, code = do_kill(process, killtime=3, termtime=1)", "2, True, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest", "get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level) if with_tty: (stdout, _, _) =", "for directory in [log_dir, state_dir]: os.mkdir(directory) config_file = os.path.join(root_dir, \"inmanta.cfg\")", "return w t1 = Timer(killtime, do_and_log(process.kill, \"killed process\")) t2 =", "do_run(args, env) return do_kill(process, killtime, termtime) def run_with_tty(args, killtime=3, termtime=2):", "\"\\n\") f.write(\"state-dir=\" + state_dir + \"\\n\") f.write(\"[database]\\n\") f.write(\"port=\" + str(port)", "out, err, code = do_kill(process, killtime=5, termtime=2) print(out, err) assert", "encoding=\"utf-8\") as f: f.write(\"[config]\\n\") f.write(\"log-dir=\" + log_dir + \"\\n\") f.write(\"state-dir=\"", "are faking it \"\"\" env = {const.ENVIRON_FORCE_TTY: \"true\"} return run_without_tty(args,", "output exec(\"compile\") exec(\"export\", \"-J\", \"out.json\") @pytest.mark.timeout(15) @pytest.mark.parametrize_any( \"cmd\", [([\"-X\", \"compile\"]),", "check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed) @pytest.mark.parametrize_any( \"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines\", [", "(stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10) test_project_path = os.path.join(tmpdir,", "\"\"\" import os import re import signal import subprocess import", "t2.cancel() stdout = convert_to_ascii(out) stderr = convert_to_ascii(err) return (stdout, stderr,", "process = do_run([sys.executable, \"-m\", 
\"inmanta.app\"] + cmd, cwd=snippetcompiler.project_dir) out, err", "entity Test: nuber attr end \"\"\" ) process = do_run([sys.executable,", "\"\\n\") if dbhost: f.write(f\"host={dbhost}\\n\") if dbuser: f.write(f\"username={dbuser}\\n\") if dbpass: f.write(f\"password={<PASSWORD>\")", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "version: \", r\"Extensions:\", r\"\\s*\\* core:\"], []), (True, True, [r\"Inmanta Service", "in all_output @pytest.mark.parametrize_any( \"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines\", [ (False, True,", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "os.path.join(root_dir, \"log\") state_dir = os.path.join(root_dir, \"data\") for directory in [log_dir,", "run_without_tty(args) stdout = \"\".join(stdout) assert \"Starting server endpoint\" in stdout", "[], ), ( 2, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"],", "with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\")", "\"killed process\")) t2 = Timer(termtime, do_and_log(process.terminate, \"terminated process\")) t1.start() t2.start()", "+ cmd, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert out.decode() ==", "end implement Test using std::none o = Test(attr=\"1234\") \"\"\" )", "out assert \"STOP\" in out assert \"SHUTDOWN COMPLETE\" in out", "\"-X\" in cmd: assert \"inmanta.ast.TypeNotFoundException: could not find type nuber", "baseenv.update(env) process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv) return process", "Endpoint\"], ), ( 3, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\",", "\"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False, [ r\"[a-z.]*[", "[\"--log-file-level\", 
str(log_level_log_file)] if timed: args += [\"--timed-logs\"] if config_dir: args", "in out assert not err def test_startup_failure(tmpdir, postgres_db, database_name): (args,", "True, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest", "([\"-X\", \"export\"]), ([\"export\"])] ) def test_minus_x_option(snippetcompiler, cmd): snippetcompiler.setup_for_snippet( \"\"\" entity", "for regex in compiled_regexes_requires_lines: if not any(regex.match(line) for line in", "in str(err) else: assert \"inmanta.ast.TypeNotFoundException: could not find type nuber", "stderr, return_code) = run_without_tty(args, killtime=15, termtime=10) assert return_code != 0", "True def test_verify_that_colorama_package_is_not_present(): \"\"\" The colorama package turns the colored", "(args, log_dir) def do_run(args, env={}, cwd=None): baseenv = os.environ.copy() baseenv.update(env)", "= [] for regex in regexes: if timed: regex =", "out.decode() == \"\" assert err.decode() == output exec(\"compile\") exec(\"export\", \"-J\",", "def test_init_project(tmpdir): args = [sys.executable, \"-m\", \"inmanta.app\", \"project\", \"init\", \"-n\",", "docker, so we are faking it \"\"\" env = {const.ENVIRON_FORCE_TTY:", "False) def test_init_project(tmpdir): args = [sys.executable, \"-m\", \"inmanta.app\", \"project\", \"init\",", "def test_check_shutdown(): process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\")]) # wait for", "process.send_signal(signal.SIGUSR1) out, err, code = do_kill(process, killtime=3, termtime=1) print(out, err)", "if version: args += [\"--version\"] args += [\"-c\", config_file, \"server\"]", "r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], [], ), ( 2, True,", "= os.path.join(root_dir, \"inmanta.cfg\") if dbport is not None: port =", "COMPLETE\" in out def test_check_bad_shutdown(): print([sys.executable, os.path.join(os.path.dirname(__file__), 
\"miniapp.py\"), \"bad\"]) process", "f.write(\"[database]\\n\") f.write(\"port=\" + str(port) + \"\\n\") f.write(\"name=\" + dbname +", "\", r\"Extensions:\", r\"\\s*\\* core:\"]), ], ) @pytest.mark.timeout(20) def test_version_argument_is_set(tmpdir, with_tty,", "t2 = Timer(termtime, do_and_log(process.terminate, \"terminated process\")) t1.start() t2.start() out, err", "in os.listdir(log_dir) log_file = os.path.join(log_dir, log_file) with open(log_file, \"r\") as", "Add a bad module extrapath = os.path.join(os.path.dirname(__file__), \"data\", \"bad_module_path\") (stdout,", "cmd: assert \"inmanta.ast.TypeNotFoundException: could not find type nuber in namespace\"", "not find type nuber in namespace\" in str(err) else: assert", "instance `__config__::Test (instantiated at ./main.cf:8)` \"\"\" \"\"\"(reported in Construct(Test) (./main.cf:8))", "F401 except ModuleNotFoundError: return False return True def test_verify_that_colorama_package_is_not_present(): \"\"\"", "type nuber in namespace\" not in str(err) @pytest.mark.timeout(20) def test_warning_config_dir_option_on_server_command(tmpdir):", "import const def get_command( tmp_dir, stdout_log_level=None, log_file=None, log_level_log_file=None, timed=False, dbport=None,", "type nuber in namespace\" in str(err) else: assert \"inmanta.ast.TypeNotFoundException: could", "pytest import inmanta.util from inmanta import const def get_command( tmp_dir,", "start because: Too bad, this plugin is broken\" ) in", "+ \"\\n\") f.write(\"[database]\\n\") f.write(\"port=\" + str(port) + \"\\n\") f.write(\"name=\" +", "regex in compiled_regexes_forbidden_lines: if any(regex.match(line) for line in log_lines): pytest.fail(\"Forbidden", "under the Apache License, Version 2.0 (the \"License\"); you may", "def convert_to_ascii(text): return [line for line in text.decode(\"ascii\").split(\"\\n\") if line", "attr end \"\"\" ) process = do_run([sys.executable, \"-m\", \"inmanta.app\"] +", "return True def 
test_verify_that_colorama_package_is_not_present(): \"\"\" The colorama package turns the", "(instantiated at ./main.cf:8)` \"\"\" \"\"\"(reported in Construct(Test) (./main.cf:8)) caused by:", "version=version_should_be_shown) if with_tty: (stdout, _, _) = run_with_tty(args, killtime=15, termtime=10)", "if log_file and log_level_log_file: args += [\"--log-file-level\", str(log_level_log_file)] if timed:", "regexes: if timed: regex = get_timestamp_regex() + \" \" +", "any(regex.match(line) for line in log_lines): pytest.fail(\"Required pattern was not found", "killtime=5, termtime=2) print(out, err) assert code == 3 assert \"-----", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "def w(): print(msg) func() return w t1 = Timer(killtime, do_and_log(process.kill,", "( 3, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting", "\"cmd\", [([\"-X\", \"compile\"]), ([\"compile\", \"-X\"]), ([\"compile\"]), ([\"export\", \"-X\"]), ([\"-X\", \"export\"]),", "(True, False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\",", "core:\"], []), (True, True, [r\"Inmanta Service Orchestrator\", r\"Compiler version: \",", "assert process.returncode == 0 out = out.decode() err = err.decode()", "args += [\"--timed-logs\"] if config_dir: args += [\"--config-dir\", config_dir] if", "process\")) t1.start() t2.start() out, err = process.communicate() t1.cancel() t2.cancel() stdout", "str(err) @pytest.mark.timeout(20) def test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir = os.path.join(tmpdir, \"non_existing_dir\") assert not", "out, err = process.communicate() t1.cancel() t2.cancel() stdout = convert_to_ascii(out) stderr", "+ \"\\n\") f.write(\"name=\" + dbname + \"\\n\") if dbhost: f.write(f\"host={dbhost}\\n\")", "cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert out.decode() == \"\" if", "in log_lines): pytest.fail(\"Required 
pattern was not found in log lines:", "in writing, software distributed under the License is distributed on", "if config_dir: args += [\"--config-dir\", config_dir] if version: args +=", "required by applicable law or agreed to in writing, software", "line != \"\"] def do_kill(process, killtime=3, termtime=2): def do_and_log(func, msg):", "[\"-c\", config_file, \"server\"] return (args, log_dir) def do_run(args, env={}, cwd=None):", "stdout_log_level=3, config_dir=non_existing_dir) (stdout, _, _) = run_without_tty(args) stdout = \"\".join(stdout)", "non_existing_config_file, \"-vvv\"] args = [sys.executable, \"-m\", \"inmanta.app\"] + config_options +", "process.communicate(timeout=2) except TimeoutExpired: pass process.send_signal(signal.SIGUSR1) out, err, code = do_kill(process,", "0 assert \"----- Thread Dump ----\" in out assert \"STOP\"", "\"inmanta.app\", \"project\", \"init\", \"-n\", \"test-project\", \"-o\", tmpdir, \"--default\"] (stdout, stderr,", "distributed under the License is distributed on an \"AS IS\"", "if with_tty: (stdout, _, _) = run_with_tty(args, killtime=15, termtime=10) else:", "in cmd: assert \"inmanta.ast.TypeNotFoundException: could not find type nuber in", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "def test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr end implement", "f.write(f\"host={dbhost}\\n\") if dbuser: f.write(f\"username={dbuser}\\n\") if dbpass: f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\") f.write(f\"enabled_extensions={', '.join(server_extensions)}\\n\")", "out.decode() == \"\" if \"-X\" in cmd: assert \"inmanta.ast.TypeNotFoundException: could", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "termtime) def run_with_tty(args, killtime=3, termtime=2): \"\"\"Could not get code for", "[r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), ( 3, True, [", "\"-X\"]), ([\"compile\"]), ([\"export\", \"-X\"]), ([\"-X\", \"export\"]), ([\"export\"])] ) def test_minus_x_option(snippetcompiler,", "assert len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed) @pytest.mark.parametrize_any( \"log_level,", "dbport=None, dbname=\"inmanta\", dbhost=None, dbuser=None, dbpass=<PASSWORD>, config_dir=None, server_extensions=[], version=False, ): root_dir", "\"inmanta.ast.TypeNotFoundException: could not find type nuber in namespace\" not in", "in compiled_regexes_requires_lines: if not any(regex.match(line) for line in log_lines): pytest.fail(\"Required", "regexes_required_lines, regexes_forbidden_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=True) check_logs(stdout, [], regexes_required_lines,", "Copyright 2018 Inmanta Licensed under the Apache License, Version 2.0", "----\" in out assert \"STOP\" not in out assert \"SHUTDOWN", "module extrapath = os.path.join(os.path.dirname(__file__), \"data\", \"bad_module_path\") (stdout, stderr, code) =", "\"inmanta.app\"] + config_options + [\"compile\"] process = do_run(args, cwd=snippetcompiler.project_dir) out,", "str(log_level_log_file)] if timed: args += [\"--timed-logs\"] if config_dir: args +=", 
"): root_dir = tmp_dir.mkdir(\"root\").strpath log_dir = os.path.join(root_dir, \"log\") state_dir =", "all_output @pytest.mark.parametrize_any( \"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines\", [ (False, True, [r\"Inmanta", "not be present. \"\"\" assert not is_colorama_package_available() @pytest.mark.parametrize_any( \"log_level, timed,", "open(config_file, \"w+\", encoding=\"utf-8\") as f: f.write(\"[config]\\n\") f.write(\"log-dir=\" + log_dir +", "for handler to be in place try: process.communicate(timeout=2) except TimeoutExpired:", "= do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) out, err, code = do_kill(process,", "test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama", "([\"compile\"]), ([\"export\", \"-X\"]), ([\"-X\", \"export\"]), ([\"export\"])] ) def test_minus_x_option(snippetcompiler, cmd):", "env) return do_kill(process, killtime, termtime) def run_with_tty(args, killtime=3, termtime=2): \"\"\"Could", "in place try: process.communicate(timeout=2) except TimeoutExpired: pass process.send_signal(signal.SIGUSR1) out, err,", "get_timestamp_regex(): return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def get_compiled_regexes(regexes, timed): result = []", "entity Test: number attr end implement Test using std::none o", "assert \"Starting server endpoint\" in stdout assert f\"Config directory {non_existing_dir}", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "assert f\"Config directory {non_existing_dir} doesn't exist\" in stdout @pytest.mark.timeout(20) def", "# Add a bad module extrapath = os.path.join(os.path.dirname(__file__), \"data\", \"bad_module_path\")", "regexes_forbidden_lines\", [ ( 3, False, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\",", "if timed: 
regex = get_timestamp_regex() + \" \" + regex", "code == 4 def test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet( \"\"\" entity Test: number", "be in place try: process.communicate(timeout=2) except TimeoutExpired: pass process.send_signal(signal.SIGUSR1) out,", "]*DEBUG[\\s]+Starting Server Rest Endpoint\"], [], ), ( 2, True, False,", "== 0 assert \"----- Thread Dump ----\" in out assert", "setup failed\" in stdout assert ( \"inmanta.server.protocol.SliceStartupException: \" \"Slice badplugin.badslice", "os.path.join(root_dir, \"data\") for directory in [log_dir, state_dir]: os.mkdir(directory) config_file =", "log_file not in os.listdir(log_dir) assert len(stdout) != 0 check_logs(stdout, regexes_required_lines,", "\"\" assert err.decode() == output exec(\"compile\") exec(\"export\", \"-J\", \"out.json\") @pytest.mark.timeout(15)", "\"compile\"]), ([\"compile\", \"-X\"]), ([\"compile\"]), ([\"export\", \"-X\"]), ([\"-X\", \"export\"]), ([\"export\"])] )", "err = process.communicate(timeout=30) assert out.decode() == \"\" assert err.decode() ==", "Rest Endpoint\"], ), ], ) @pytest.mark.timeout(60) def test_log_file_set(tmpdir, log_level, with_tty,", "bad, this plugin is broken\" ) in stdout assert code", "Endpoint\"], [], ), ( 2, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server", "== \"\" assert err.decode() == output exec(\"compile\") exec(\"export\", \"-J\", \"out.json\")", "not set attribute `attr` on instance `__config__::Test (instantiated at ./main.cf:8)`", "True, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"],", "because: Too bad, this plugin is broken\" ) in stdout", "subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv) return process def convert_to_ascii(text): return", "False, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ 
]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"],", "Endpoint\"], [], ), ( 2, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server", "assert \"inmanta.ast.TypeNotFoundException: could not find type nuber in namespace\" in", "lines: %s\" % (regex.pattern,)) def test_check_shutdown(): process = do_run([sys.executable, os.path.join(os.path.dirname(__file__),", "may not use this file except in compliance with the", "\"\"\" env = {const.ENVIRON_FORCE_TTY: \"true\"} return run_without_tty(args, env=env, killtime=killtime, termtime=termtime)", "broken\" ) in stdout assert code == 4 def test_compiler_exception_output(snippetcompiler):", "print(out, err) assert code == 0 assert \"----- Thread Dump", "f\"Config file {non_existing_config_file} doesn't exist\" in all_output @pytest.mark.parametrize_any( \"with_tty, version_should_be_shown,", "out + err assert \"Starting compile\" in all_output assert \"Compile", "err = process.communicate(timeout=30) assert out.decode() == \"\" if \"-X\" in", "version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), (True, False, [], [r\"Inmanta Service", "in regexes: if timed: regex = get_timestamp_regex() + \" \"", "nuber in namespace\" in str(err) else: assert \"inmanta.ast.TypeNotFoundException: could not", "( \"\"\"Could not set attribute `attr` on instance `__config__::Test (instantiated", "assert code == 0 assert \"----- Thread Dump ----\" in", "tty to run stable in docker, so we are faking", "in namespace\" in str(err) else: assert \"inmanta.ast.TypeNotFoundException: could not find", "3, True, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server", "]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"],", "file {non_existing_config_file} doesn't exist\" in all_output @pytest.mark.parametrize_any( \"with_tty, version_should_be_shown, 
regexes_required_lines,", "the License. Contact: <EMAIL> \"\"\" import os import re import", "+ dbname + \"\\n\") if dbhost: f.write(f\"host={dbhost}\\n\") if dbuser: f.write(f\"username={dbuser}\\n\")", "Rest Endpoint\"], ), ( 3, True, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m", "ModuleNotFoundError: return False return True def test_verify_that_colorama_package_is_not_present(): \"\"\" The colorama", "agreed to in writing, software distributed under the License is", "and limitations under the License. Contact: <EMAIL> \"\"\" import os", "def test_no_log_file_set(tmpdir, log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and", "with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting", "@pytest.mark.timeout(15) @pytest.mark.parametrize_any( \"cmd\", [([\"-X\", \"compile\"]), ([\"compile\", \"-X\"]), ([\"compile\"]), ([\"export\", \"-X\"]),", "{const.ENVIRON_FORCE_TTY: \"true\"} return run_without_tty(args, env=env, killtime=killtime, termtime=termtime) def get_timestamp_regex(): return", "server_extensions=[], version=False, ): root_dir = tmp_dir.mkdir(\"root\").strpath log_dir = os.path.join(root_dir, \"log\")", "env={\"PYTHONPATH\": pp + \":\" + extrapath}, killtime=15, termtime=10) assert \"inmanta", "if not any(regex.match(line) for line in log_lines): pytest.fail(\"Required pattern was", "not find type nuber in namespace\" not in str(err) @pytest.mark.timeout(20)", "Inmanta Licensed under the Apache License, Version 2.0 (the \"License\");", "\"inmanta.ast.TypeNotFoundException: could not find type nuber in namespace\" in str(err)", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "if dbuser: f.write(f\"username={dbuser}\\n\") if dbpass: f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\") f.write(f\"enabled_extensions={', 
'.join(server_extensions)}\\n\") args", "Test: number attr end implement Test using std::none o =", "config_options = [\"-c\", non_existing_config_file, \"-vvv\"] args = [sys.executable, \"-m\", \"inmanta.app\"]", "out = out.decode() err = err.decode() all_output = out +", "sys from subprocess import TimeoutExpired from threading import Timer import", "turns the colored characters in TTY-based terminal into uncolored characters.", "run_with_tty(args) else: (stdout, _, _) = run_without_tty(args) assert log_file in", "server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ), ( 3,", "assert log_file in os.listdir(log_dir) log_file = os.path.join(log_dir, log_file) with open(log_file,", "os.path.join(os.path.dirname(__file__), \"data\", \"bad_module_path\") (stdout, stderr, code) = run_without_tty(args, env={\"PYTHONPATH\": pp", "run_without_tty(args, killtime=15, termtime=10) test_project_path = os.path.join(tmpdir, \"test-project\") assert return_code ==", "r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"], []), (False, False, [],", "False, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting", "except TimeoutExpired: pass process.send_signal(signal.SIGUSR1) out, err, code = do_kill(process, killtime=3,", "log_dir) = get_command( tmpdir, dbport=postgres_db.port, dbname=database_name, dbhost=postgres_db.host, dbuser=postgres_db.user, dbpass=<PASSWORD>, server_extensions=[\"badplugin\"],", "]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ), ], ) @pytest.mark.timeout(20) def", "r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), (True, False, [], [r\"Inmanta", "return_code == 0 assert os.path.exists(test_project_path) (stdout, stderr, return_code) = run_without_tty(args,", "find type nuber in namespace\" in str(err) else: assert \"inmanta.ast.TypeNotFoundException:", "os.listdir(log_dir) 
assert len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed) @pytest.mark.parametrize_any(", "limitations under the License. Contact: <EMAIL> \"\"\" import os import", "assert \"inmanta.ast.TypeNotFoundException: could not find type nuber in namespace\" not", "t1.start() t2.start() out, err = process.communicate() t1.cancel() t2.cancel() stdout =", "def run_with_tty(args, killtime=3, termtime=2): \"\"\"Could not get code for actual", "\":\" + extrapath}, killtime=15, termtime=10) assert \"inmanta ERROR Server setup", "], ) @pytest.mark.timeout(60) def test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines, regexes_forbidden_lines): if", "compliance with the License. You may obtain a copy of", "server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], [], ), (", "config_dir=None, server_extensions=[], version=False, ): root_dir = tmp_dir.mkdir(\"root\").strpath log_dir = os.path.join(root_dir,", "import signal import subprocess import sys from subprocess import TimeoutExpired", "Test using std::none o = Test(attr=\"1234\") \"\"\" ) output =", "= run_without_tty(args, killtime=15, termtime=10) assert return_code != 0 assert len(stderr)", "nuber attr end \"\"\" ) process = do_run([sys.executable, \"-m\", \"inmanta.app\"]", "args += [\"--version\"] args += [\"-c\", config_file, \"server\"] return (args,", "The colorama package turns the colored characters in TTY-based terminal", "log_file) with open(log_file, \"r\") as f: log_lines = f.readlines() check_logs(log_lines,", "3, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server", "Too bad, this plugin is broken\" ) in stdout assert", "end \"\"\" ) config_options = [\"-c\", non_existing_config_file, \"-vvv\"] args =", "inmanta.util from inmanta import const def get_command( tmp_dir, stdout_log_level=None, log_file=None,", "cwd=snippetcompiler.project_dir) out, err = 
process.communicate(timeout=30) assert out.decode() == \"\" assert", "regexes_forbidden_lines\", [ ( 3, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\",", "a bad module extrapath = os.path.join(os.path.dirname(__file__), \"data\", \"bad_module_path\") (stdout, stderr,", "), ( 2, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[", "= do_run([sys.executable, \"-m\", \"inmanta.app\"] + cmd, cwd=snippetcompiler.project_dir) out, err =", "2, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server", "assert return_code != 0 assert len(stderr) == 1 assert \"already", "( 2, False, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[", "in [log_dir, state_dir]: os.mkdir(directory) config_file = os.path.join(root_dir, \"inmanta.cfg\") if dbport", "]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], [], ),", "_) = run_with_tty(args, killtime=15, termtime=10) else: (stdout, _, _) =", "3, True, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[", "_) = run_with_tty(args) else: (stdout, _, _) = run_without_tty(args) assert", "), ( 2, True, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"],", "with_tty: (stdout, _, _) = run_with_tty(args) else: (stdout, _, _)", "= tmp_dir.mkdir(\"root\").strpath log_dir = os.path.join(root_dir, \"log\") state_dir = os.path.join(root_dir, \"data\")", "import os import re import signal import subprocess import sys", "log_lines: print(line) for regex in compiled_regexes_requires_lines: if not any(regex.match(line) for", "3 assert \"----- Thread Dump ----\" in out assert \"STOP\"", "= Timer(killtime, do_and_log(process.kill, \"killed process\")) t2 = Timer(termtime, do_and_log(process.terminate, \"terminated", 
"get_compiled_regexes(regexes, timed): result = [] for regex in regexes: if", "termtime=2): process = do_run(args, env) return do_kill(process, killtime, termtime) def", "such, this package should not be present. \"\"\" assert not", "False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"],", "entity Test: number attr end \"\"\" ) config_options = [\"-c\",", "exec(\"export\", \"-J\", \"out.json\") @pytest.mark.timeout(15) @pytest.mark.parametrize_any( \"cmd\", [([\"-X\", \"compile\"]), ([\"compile\", \"-X\"]),", "r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], [], ), ( 2, False,", "assert \"SHUTDOWN COMPLETE\" not in out assert not err def", "endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\", ], [], ),", "([\"export\", \"-X\"]), ([\"-X\", \"export\"]), ([\"export\"])] ) def test_minus_x_option(snippetcompiler, cmd): snippetcompiler.setup_for_snippet(", "endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ), ( 3, True,", "Endpoint\", ], [], ), ( 2, False, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server", "env = {const.ENVIRON_FORCE_TTY: \"true\"} return run_without_tty(args, env=env, killtime=killtime, termtime=termtime) def", "Server setup failed\" in stdout assert ( \"inmanta.server.protocol.SliceStartupException: \" \"Slice", "return result def is_colorama_package_available(): try: import colorama # noqa: F401", "exec(*cmd): process = do_run([sys.executable, \"-m\", \"inmanta.app\"] + list(cmd), cwd=snippetcompiler.project_dir) out,", "], [], ), ( 2, False, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"],", "this package should not be present. 
\"\"\" assert not is_colorama_package_available()", "for line in log_lines): pytest.fail(\"Forbidden pattern found in log lines:", "tmp_dir.mkdir(\"root\").strpath log_dir = os.path.join(root_dir, \"log\") state_dir = os.path.join(root_dir, \"data\") for", "dbpass=<PASSWORD>, config_dir=None, server_extensions=[], version=False, ): root_dir = tmp_dir.mkdir(\"root\").strpath log_dir =", "in compiled_regexes_forbidden_lines: if any(regex.match(line) for line in log_lines): pytest.fail(\"Forbidden pattern", "killtime=3, termtime=2): \"\"\"Could not get code for actual tty to", "args = [sys.executable, \"-m\", \"inmanta.app\"] if stdout_log_level: args.append(\"-\" + \"v\"", "return [line for line in text.decode(\"ascii\").split(\"\\n\") if line != \"\"]", "code) = run_without_tty(args, env={\"PYTHONPATH\": pp + \":\" + extrapath}, killtime=15,", "timed=timed) if with_tty: (stdout, _, _) = run_with_tty(args) else: (stdout,", "handler to be in place try: process.communicate(timeout=2) except TimeoutExpired: pass", "]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\", ], [],", "0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False) def test_init_project(tmpdir): args = [sys.executable,", "def do_and_log(func, msg): def w(): print(msg) func() return w t1", "], ) @pytest.mark.timeout(20) def test_no_log_file_set(tmpdir, log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines):", "for regex in compiled_regexes_forbidden_lines: if any(regex.match(line) for line in log_lines):", "stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level) if with_tty: (stdout, _, _) = run_with_tty(args)", "postgres_db, database_name): (args, log_dir) = get_command( tmpdir, dbport=postgres_db.port, dbname=database_name, dbhost=postgres_db.host,", "args = [sys.executable, \"-m\", \"inmanta.app\", \"project\", \"init\", \"-n\", \"test-project\", \"-o\",", 
"done\" in all_output assert f\"Config file {non_existing_config_file} doesn't exist\" in", "uncolored characters. As such, this package should not be present.", "+ \"v\" * stdout_log_level) if log_file: log_file = os.path.join(log_dir, log_file)", "2, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server", "check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False) def test_init_project(tmpdir): args = [sys.executable, \"-m\",", "Server Rest Endpoint\"], ), ], ) @pytest.mark.timeout(20) def test_no_log_file_set(tmpdir, log_level,", "killtime=killtime, termtime=termtime) def get_timestamp_regex(): return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def get_compiled_regexes(regexes, timed):", "list(cmd), cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert out.decode() == \"\"", "\"-X\"]), ([\"-X\", \"export\"]), ([\"export\"])] ) def test_minus_x_option(snippetcompiler, cmd): snippetcompiler.setup_for_snippet( \"\"\"", "@pytest.mark.timeout(20) def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir): non_existing_config_file = os.path.join(tmpdir, \"non_existing_config_file\") snippetcompiler.setup_for_snippet( \"\"\"", "def do_run(args, env={}, cwd=None): baseenv = os.environ.copy() baseenv.update(env) process =", "\\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\", ],", "Construct(Test) (./main.cf:8)) caused by: Invalid value '1234', expected Number (reported", "License, Version 2.0 (the \"License\"); you may not use this", "os.path.join(tmpdir, \"test-project\") assert return_code == 0 assert os.path.exists(test_project_path) (stdout, stderr,", "code == 0 assert \"----- Thread Dump ----\" in out", "Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), (True, False,", "\"----- Thread Dump ----\" in out 
assert \"STOP\" not in", "is present\") log_file = \"server.log\" (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level,", "\"server.log\" (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level) if with_tty:", "len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False) def test_init_project(tmpdir): args", "assert out.decode() == \"\" assert err.decode() == output exec(\"compile\") exec(\"export\",", "dbport is not None: port = dbport else: port =", "in stdout assert ( \"inmanta.server.protocol.SliceStartupException: \" \"Slice badplugin.badslice failed to", "regexes_forbidden_lines, False) def test_init_project(tmpdir): args = [sys.executable, \"-m\", \"inmanta.app\", \"project\",", "in log_lines): pytest.fail(\"Forbidden pattern found in log lines: %s\" %", "\", r\"Extensions:\", r\"\\s*\\* core:\"], []), (True, True, [r\"Inmanta Service Orchestrator\",", "process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) out, err, code =", "log_file=None, log_level_log_file=None, timed=False, dbport=None, dbname=\"inmanta\", dbhost=None, dbuser=None, dbpass=<PASSWORD>, config_dir=None, server_extensions=[],", "place try: process.communicate(timeout=2) except TimeoutExpired: pass process.send_signal(signal.SIGUSR1) out, err, code", "], [], ), ( 2, True, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "err = process.communicate() t1.cancel() t2.cancel() stdout = convert_to_ascii(out) stderr =", "\"\".join(stdout) assert \"Starting server endpoint\" in stdout assert f\"Config directory", "+ str(port) + \"\\n\") f.write(\"name=\" + dbname + \"\\n\") if", "regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\") (args, log_dir)", "f.write(\"state-dir=\" + state_dir + \"\\n\") 
f.write(\"[database]\\n\") f.write(\"port=\" + str(port) +", "exec(\"compile\") exec(\"export\", \"-J\", \"out.json\") @pytest.mark.timeout(15) @pytest.mark.parametrize_any( \"cmd\", [([\"-X\", \"compile\"]), ([\"compile\",", ") process = do_run([sys.executable, \"-m\", \"inmanta.app\"] + cmd, cwd=snippetcompiler.project_dir) out,", "regexes_forbidden_lines, timed) @pytest.mark.parametrize_any( \"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3,", "result def is_colorama_package_available(): try: import colorama # noqa: F401 except", "stdout assert f\"Config directory {non_existing_dir} doesn't exist\" in stdout @pytest.mark.timeout(20)", "tmpdir, dbport=postgres_db.port, dbname=database_name, dbhost=postgres_db.host, dbuser=postgres_db.user, dbpass=<PASSWORD>, server_extensions=[\"badplugin\"], ) pp =", "True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server", "dbuser=postgres_db.user, dbpass=<PASSWORD>, server_extensions=[\"badplugin\"], ) pp = \":\".join(sys.path) # Add a", "killtime=3, termtime=1) print(out, err) assert code == 0 assert \"-----", "= get_compiled_regexes(regexes_required_lines, timed) compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed) for line in", "in out assert \"SHUTDOWN COMPLETE\" not in out assert not", "test_check_shutdown(): process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\")]) # wait for handler", "noqa: F401 except ModuleNotFoundError: return False return True def test_verify_that_colorama_package_is_not_present():", "test_project_path = os.path.join(tmpdir, \"test-project\") assert return_code == 0 assert os.path.exists(test_project_path)", "version_should_be_shown, regexes_required_lines, regexes_forbidden_lines\", [ (False, True, [r\"Inmanta Service Orchestrator\", r\"Compiler", "False, False, [r\"[a-z.]*[ 
]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest", "config_dir] if version: args += [\"--version\"] args += [\"-c\", config_file,", "def test_verify_that_colorama_package_is_not_present(): \"\"\" The colorama package turns the colored characters", "]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\",", "False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest", "line in log_lines: print(line) for regex in compiled_regexes_requires_lines: if not", "= [sys.executable, \"-m\", \"inmanta.app\"] if stdout_log_level: args.append(\"-\" + \"v\" *", "snippetcompiler.setup_for_snippet( \"\"\" entity Test: nuber attr end \"\"\" ) process", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "assert err.decode() == output exec(\"compile\") exec(\"export\", \"-J\", \"out.json\") @pytest.mark.timeout(15) @pytest.mark.parametrize_any(", "compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed) compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed) for line", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "os.path.join(os.path.dirname(__file__), \"miniapp.py\")]) # wait for handler to be in place", "r\"\\s*\\* core:\"]), ], ) @pytest.mark.timeout(20) def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines,", "killtime=3, termtime=2): def do_and_log(func, msg): def w(): print(msg) func() return", "( 3, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting", "\"-J\", \"out.json\") @pytest.mark.timeout(15) @pytest.mark.parametrize_any( \"cmd\", [([\"-X\", \"compile\"]), ([\"compile\", \"-X\"]), ([\"compile\"]),", "in str(err) @pytest.mark.timeout(20) def 
test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir = os.path.join(tmpdir, \"non_existing_dir\") assert", "return_code) = run_without_tty(args, killtime=15, termtime=10) test_project_path = os.path.join(tmpdir, \"test-project\") assert", "get_command(tmpdir, version=version_should_be_shown) if with_tty: (stdout, _, _) = run_with_tty(args, killtime=15,", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "t1 = Timer(killtime, do_and_log(process.kill, \"killed process\")) t2 = Timer(termtime, do_and_log(process.terminate,", "Endpoint\"], ), ( 3, True, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting", "dbname=database_name, dbhost=postgres_db.host, dbuser=postgres_db.user, dbpass=<PASSWORD>, server_extensions=[\"badplugin\"], ) pp = \":\".join(sys.path) #", "log lines: %s\" % (regex.pattern,)) def test_check_shutdown(): process = do_run([sys.executable,", "stdout_log_level) if log_file: log_file = os.path.join(log_dir, log_file) args += [\"--log-file\",", "config_options + [\"compile\"] process = do_run(args, cwd=snippetcompiler.project_dir) out, err =", "under the License is distributed on an \"AS IS\" BASIS,", "stdout assert ( \"inmanta.server.protocol.SliceStartupException: \" \"Slice badplugin.badslice failed to start", "= process.communicate(timeout=30) assert process.returncode == 0 out = out.decode() err", "assert code == 4 def test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet( \"\"\" entity Test:", "[\"--config-dir\", config_dir] if version: args += [\"--version\"] args += [\"-c\",", "termtime=10) else: (stdout, _, _) = run_without_tty(args, killtime=15, termtime=10) assert", "timed) @pytest.mark.parametrize_any( \"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False,", "tmpdir, \"--default\"] (stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10) test_project_path", "2, False, 
[r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest", "present\") (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, timed=timed) if with_tty: (stdout,", "this file except in compliance with the License. You may", "Rest Endpoint\", ], [], ), ( 2, True, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting", "print(out, err) assert code == 3 assert \"----- Thread Dump", "= run_without_tty(args) assert log_file in os.listdir(log_dir) log_file = os.path.join(log_dir, log_file)", "extrapath}, killtime=15, termtime=10) assert \"inmanta ERROR Server setup failed\" in", "import colorama # noqa: F401 except ModuleNotFoundError: return False return", "= run_with_tty(args) else: (stdout, _, _) = run_without_tty(args) log_file =", "\"\"] def do_kill(process, killtime=3, termtime=2): def do_and_log(func, msg): def w():", "doesn't exist\" in stdout @pytest.mark.timeout(20) def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir): non_existing_config_file =", "w(): print(msg) func() return w t1 = Timer(killtime, do_and_log(process.kill, \"killed", "file except in compliance with the License. You may obtain", "= os.environ.copy() baseenv.update(env) process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv)", "in namespace\" not in str(err) @pytest.mark.timeout(20) def test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir =", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "Rest Endpoint\", ], [], ), ( 2, True, True, [r\"\\x1b\\[32m[a-z.]*[", "\"miniapp.py\")]) # wait for handler to be in place try:", "[\"-c\", non_existing_config_file, \"-vvv\"] args = [sys.executable, \"-m\", \"inmanta.app\"] + config_options", "def get_compiled_regexes(regexes, timed): result = [] for regex in regexes:", "[]), (True, True, [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\",", "= do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\")]) # wait for handler to be", ") output = ( \"\"\"Could not set attribute `attr` on", "Rest Endpoint\", ], [], ), ( 2, False, True, [r\"\\x1b\\[32m[a-z.]*[", "regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server", "config_file, \"server\"] return (args, log_dir) def do_run(args, env={}, cwd=None): baseenv", "+= [\"--config-dir\", config_dir] if version: args += [\"--version\"] args +=", "is_colorama_package_available(): try: import colorama # noqa: F401 except ModuleNotFoundError: return", "return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def get_compiled_regexes(regexes, timed): result = [] for", "in all_output assert f\"Config file {non_existing_config_file} doesn't exist\" in all_output", "Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), (True, False, [],", ") pp = \":\".join(sys.path) # Add a bad module extrapath", "assert \"STOP\" in out assert \"SHUTDOWN COMPLETE\" in out def", "f.write(f\"username={dbuser}\\n\") if dbpass: f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\") f.write(f\"enabled_extensions={', '.join(server_extensions)}\\n\") args = [sys.executable,", "# noqa: F401 except ModuleNotFoundError: return False return True def", "= [sys.executable, \"-m\", \"inmanta.app\"] + config_options + [\"compile\"] process =", "\"SHUTDOWN COMPLETE\" in out def test_check_bad_shutdown(): print([sys.executable, 
os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"])", "dbhost=None, dbuser=None, dbpass=<PASSWORD>, config_dir=None, server_extensions=[], version=False, ): root_dir = tmp_dir.mkdir(\"root\").strpath", "(False, True, [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\*", "!= \"\"] def do_kill(process, killtime=3, termtime=2): def do_and_log(func, msg): def", "== output exec(\"compile\") exec(\"export\", \"-J\", \"out.json\") @pytest.mark.timeout(15) @pytest.mark.parametrize_any( \"cmd\", [([\"-X\",", "root_dir = tmp_dir.mkdir(\"root\").strpath log_dir = os.path.join(root_dir, \"log\") state_dir = os.path.join(root_dir,", "\"\"\"Could not set attribute `attr` on instance `__config__::Test (instantiated at", "set attribute `attr` on instance `__config__::Test (instantiated at ./main.cf:8)` \"\"\"", "termtime=1) print(out, err) assert code == 0 assert \"----- Thread", "[\"--version\"] args += [\"-c\", config_file, \"server\"] return (args, log_dir) def", "stderr, process.returncode) def run_without_tty(args, env={}, killtime=3, termtime=2): process = do_run(args,", "(stdout, stderr, process.returncode) def run_without_tty(args, env={}, killtime=3, termtime=2): process =", "% (regex.pattern,)) for regex in compiled_regexes_forbidden_lines: if any(regex.match(line) for line", "is not None: port = dbport else: port = inmanta.util.get_free_tcp_port()", "\"v\" * stdout_log_level) if log_file: log_file = os.path.join(log_dir, log_file) args", "(./main.cf:8)) caused by: Invalid value '1234', expected Number (reported in", "_, _) = run_with_tty(args) else: (stdout, _, _) = run_without_tty(args)", "2.0 (the \"License\"); you may not use this file except", "if log_file: log_file = os.path.join(log_dir, log_file) args += [\"--log-file\", log_file]", "out.decode() err = err.decode() all_output = out + err assert", "with_tty: pytest.skip(\"Colorama is present\") log_file = \"server.log\" (args, log_dir) =", "err, code = 
do_kill(process, killtime=3, termtime=1) print(out, err) assert code", "== 4 def test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr", "_) = run_without_tty(args, killtime=15, termtime=10) assert len(stdout) != 0 check_logs(stdout,", "use this file except in compliance with the License. You", "server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), ], )", "if stdout_log_level: args.append(\"-\" + \"v\" * stdout_log_level) if log_file: log_file", "2, True, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m", "env={}, cwd=None): baseenv = os.environ.copy() baseenv.update(env) process = subprocess.Popen(args, cwd=cwd,", "server_extensions=[\"badplugin\"], ) pp = \":\".join(sys.path) # Add a bad module", "termtime=10) assert return_code != 0 assert len(stderr) == 1 assert", "True, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m", "stderr=subprocess.PIPE, env=baseenv) return process def convert_to_ascii(text): return [line for line", "Test(attr=\"1234\") \"\"\" ) output = ( \"\"\"Could not set attribute", "env=baseenv) return process def convert_to_ascii(text): return [line for line in", "compiled_regexes_requires_lines: if not any(regex.match(line) for line in log_lines): pytest.fail(\"Required pattern", "), ], ) @pytest.mark.timeout(60) def test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines, regexes_forbidden_lines):", "get_command( tmpdir, dbport=postgres_db.port, dbname=database_name, dbhost=postgres_db.host, dbuser=postgres_db.user, dbpass=<PASSWORD>, server_extensions=[\"badplugin\"], ) pp", "= run_without_tty(args, killtime=15, termtime=10) test_project_path = os.path.join(tmpdir, \"test-project\") assert return_code", "\"server.log\" assert log_file not in 
os.listdir(log_dir) assert len(stdout) != 0", "r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def get_compiled_regexes(regexes, timed): result = [] for regex", "[ ( 3, False, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[", "log_file = os.path.join(log_dir, log_file) with open(log_file, \"r\") as f: log_lines", "[r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), ],", "([\"compile\", \"-X\"]), ([\"compile\"]), ([\"export\", \"-X\"]), ([\"-X\", \"export\"]), ([\"export\"])] ) def", "version: args += [\"--version\"] args += [\"-c\", config_file, \"server\"] return", "def test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty:", "None: port = dbport else: port = inmanta.util.get_free_tcp_port() with open(config_file,", "log_file = os.path.join(log_dir, log_file) args += [\"--log-file\", log_file] if log_file", "True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest", "to start because: Too bad, this plugin is broken\" )", "(the \"License\"); you may not use this file except in", "= \"server.log\" (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level) if", "= \"server.log\" assert log_file not in os.listdir(log_dir) assert len(stdout) !=", "), ( 3, True, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[", "os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) out,", "\"-vvv\"] args = [sys.executable, \"-m\", \"inmanta.app\"] + config_options + [\"compile\"]", "log_file) args += [\"--log-file\", log_file] if log_file and log_level_log_file: args", "\"r\") as f: log_lines = f.readlines() check_logs(log_lines, 
regexes_required_lines, regexes_forbidden_lines, timed=True)", "regexes_required_lines, regexes_forbidden_lines, timed) @pytest.mark.parametrize_any( \"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ (", "version_should_be_shown, regexes_required_lines, regexes_forbidden_lines): (args, log_dir) = get_command(tmpdir, version=version_should_be_shown) if with_tty:", "for actual tty to run stable in docker, so we", "the Apache License, Version 2.0 (the \"License\"); you may not", "or implied. See the License for the specific language governing", "KIND, either express or implied. See the License for the", "os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) out, err, code = do_kill(process, killtime=5, termtime=2)", "for line in log_lines: print(line) for regex in compiled_regexes_requires_lines: if", "False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"],", "Endpoint\"], ), ], ) @pytest.mark.timeout(20) def test_no_log_file_set(tmpdir, log_level, timed, with_tty,", "= convert_to_ascii(err) return (stdout, stderr, process.returncode) def run_without_tty(args, env={}, killtime=3,", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "os.environ.copy() baseenv.update(env) process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv) return", "(regex.pattern,)) for regex in compiled_regexes_forbidden_lines: if any(regex.match(line) for line in", "= inmanta.util.get_free_tcp_port() with open(config_file, \"w+\", encoding=\"utf-8\") as f: f.write(\"[config]\\n\") f.write(\"log-dir=\"", "r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\", ],", "r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"], []), (True, True, [r\"Inmanta", "package turns the colored characters in TTY-based terminal into uncolored", "= 
get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir) (stdout, _, _) = run_without_tty(args) stdout", "killtime=3, termtime=2): process = do_run(args, env) return do_kill(process, killtime, termtime)", "(args, _) = get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir) (stdout, _, _) =", "+= [\"-c\", config_file, \"server\"] return (args, log_dir) def do_run(args, env={},", "non_existing_config_file = os.path.join(tmpdir, \"non_existing_config_file\") snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr", "\"-m\", \"inmanta.app\"] + list(cmd), cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert", "snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr end \"\"\" ) config_options", "the License for the specific language governing permissions and limitations", "dbport=postgres_db.port, dbname=database_name, dbhost=postgres_db.host, dbuser=postgres_db.user, dbpass=<PASSWORD>, server_extensions=[\"badplugin\"], ) pp = \":\".join(sys.path)", "implied. 
See the License for the specific language governing permissions", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "[] for regex in regexes: if timed: regex = get_timestamp_regex()", "extrapath = os.path.join(os.path.dirname(__file__), \"data\", \"bad_module_path\") (stdout, stderr, code) = run_without_tty(args,", "0 assert os.path.exists(test_project_path) (stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10)", "assert \"----- Thread Dump ----\" in out assert \"STOP\" not", "number attr end implement Test using std::none o = Test(attr=\"1234\")", "not in out assert not err def test_startup_failure(tmpdir, postgres_db, database_name):", "Endpoint\"], ), ], ) @pytest.mark.timeout(60) def test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines,", "do_and_log(process.kill, \"killed process\")) t2 = Timer(termtime, do_and_log(process.terminate, \"terminated process\")) t1.start()", "\"STOP\" not in out assert \"SHUTDOWN COMPLETE\" not in out", "stdout_log_level=None, log_file=None, log_level_log_file=None, timed=False, dbport=None, dbname=\"inmanta\", dbhost=None, dbuser=None, dbpass=<PASSWORD>, config_dir=None,", "pp = \":\".join(sys.path) # Add a bad module extrapath =", "f.write(f\"enabled_extensions={', '.join(server_extensions)}\\n\") args = [sys.executable, \"-m\", \"inmanta.app\"] if stdout_log_level: args.append(\"-\"", "code = do_kill(process, killtime=3, termtime=1) print(out, err) assert code ==", "writing, software distributed under the License is distributed on an", "def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines): (args, log_dir) = get_command(tmpdir,", "not found in log lines: %s\" % (regex.pattern,)) for regex", "state_dir + \"\\n\") f.write(\"[database]\\n\") f.write(\"port=\" + str(port) + \"\\n\") f.write(\"name=\"", "\"\"\" entity Test: number attr end \"\"\" ) config_options =", "config_dir=non_existing_dir) 
(stdout, _, _) = run_without_tty(args) stdout = \"\".join(stdout) assert", "in compliance with the License. You may obtain a copy", "TimeoutExpired from threading import Timer import pytest import inmanta.util from", "def get_timestamp_regex(): return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def get_compiled_regexes(regexes, timed): result =", "characters. As such, this package should not be present. \"\"\"", "endpoint\" in stdout assert f\"Config directory {non_existing_dir} doesn't exist\" in", "inmanta import const def get_command( tmp_dir, stdout_log_level=None, log_file=None, log_level_log_file=None, timed=False,", "+ regex compiled_regex = re.compile(regex) result.append(compiled_regex) return result def is_colorama_package_available():", "os.path.join(log_dir, log_file) with open(log_file, \"r\") as f: log_lines = f.readlines()", "else: port = inmanta.util.get_free_tcp_port() with open(config_file, \"w+\", encoding=\"utf-8\") as f:", "= out.decode() err = err.decode() all_output = out + err", "@pytest.mark.timeout(20) def test_no_log_file_set(tmpdir, log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available()", "[ ( 3, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[", "either express or implied. 
See the License for the specific", "stdout = \"\".join(stdout) assert \"Starting server endpoint\" in stdout assert", "Server Rest Endpoint\"], [], ), ( 2, True, False, [r\"[a-z.]*[", "\"License\"); you may not use this file except in compliance", "[log_dir, state_dir]: os.mkdir(directory) config_file = os.path.join(root_dir, \"inmanta.cfg\") if dbport is", "= do_run(args, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert process.returncode ==", "killtime=15, termtime=10) assert \"inmanta ERROR Server setup failed\" in stdout", "plugin is broken\" ) in stdout assert code == 4", "4 def test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr end", "= os.path.join(tmpdir, \"non_existing_dir\") assert not os.path.isdir(non_existing_dir) (args, _) = get_command(tmpdir,", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "os.listdir(log_dir) log_file = os.path.join(log_dir, log_file) with open(log_file, \"r\") as f:", "License. 
Contact: <EMAIL> \"\"\" import os import re import signal", "(stdout, _, _) = run_without_tty(args, killtime=15, termtime=10) assert len(stdout) !=", "timed): result = [] for regex in regexes: if timed:", "r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\", ], [], ), ( 2,", "r\"Extensions:\", r\"\\s*\\* core:\"], []), (True, True, [r\"Inmanta Service Orchestrator\", r\"Compiler", "( 2, True, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[", "args += [\"--config-dir\", config_dir] if version: args += [\"--version\"] args", "]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ), ( 3, True, False,", "( \"inmanta.server.protocol.SliceStartupException: \" \"Slice badplugin.badslice failed to start because: Too", "True, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting", "\"\\n\") f.write(\"[database]\\n\") f.write(\"port=\" + str(port) + \"\\n\") f.write(\"name=\" + dbname", "3, False, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[", "assert ( \"inmanta.server.protocol.SliceStartupException: \" \"Slice badplugin.badslice failed to start because:", "False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\*", "= out + err assert \"Starting compile\" in all_output assert", "assert not is_colorama_package_available() @pytest.mark.parametrize_any( \"log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines\", [", "termtime=10) test_project_path = os.path.join(tmpdir, \"test-project\") assert return_code == 0 assert", "else: (stdout, _, _) = run_without_tty(args) log_file = \"server.log\" assert", ") config_options = [\"-c\", non_existing_config_file, \"-vvv\"] args = [sys.executable, \"-m\",", "= Timer(termtime, 
do_and_log(process.terminate, \"terminated process\")) t1.start() t2.start() out, err =", "( 2, False, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server", "exist\" in all_output @pytest.mark.parametrize_any( \"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines\", [ (False,", "\"-m\", \"inmanta.app\", \"project\", \"init\", \"-n\", \"test-project\", \"-o\", tmpdir, \"--default\"] (stdout,", "with open(log_file, \"r\") as f: log_lines = f.readlines() check_logs(log_lines, regexes_required_lines,", "def get_command( tmp_dir, stdout_log_level=None, log_file=None, log_level_log_file=None, timed=False, dbport=None, dbname=\"inmanta\", dbhost=None,", "== 0 assert os.path.exists(test_project_path) (stdout, stderr, return_code) = run_without_tty(args, killtime=15,", "!= 0 assert len(stderr) == 1 assert \"already exists\" in", "is_colorama_package_available() @pytest.mark.parametrize_any( \"log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3,", "[], ), ( 2, False, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server", "(False, False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\",", "Endpoint\", ], [], ), ( 2, False, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m", "into uncolored characters. 
As such, this package should not be", "assert \"Starting compile\" in all_output assert \"Compile done\" in all_output", "signal import subprocess import sys from subprocess import TimeoutExpired from", "\\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ),", "not in out assert \"SHUTDOWN COMPLETE\" not in out assert", "timed: regex = get_timestamp_regex() + \" \" + regex compiled_regex", "+ \" \" + regex compiled_regex = re.compile(regex) result.append(compiled_regex) return", "end \"\"\" ) process = do_run([sys.executable, \"-m\", \"inmanta.app\"] + cmd,", "get_compiled_regexes(regexes_forbidden_lines, timed) for line in log_lines: print(line) for regex in", "env={}, killtime=3, termtime=2): process = do_run(args, env) return do_kill(process, killtime,", "under the License. Contact: <EMAIL> \"\"\" import os import re", "regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server", "in stdout @pytest.mark.timeout(20) def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir): non_existing_config_file = os.path.join(tmpdir, \"non_existing_config_file\")", "r\"\\s*\\* core:\"], []), (False, False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler", "termtime=10) assert \"inmanta ERROR Server setup failed\" in stdout assert", "]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\", ], [], ), ( 2,", "expected Number (reported in Construct(Test) (./main.cf:8)) \"\"\" ) def exec(*cmd):", "log_lines): pytest.fail(\"Required pattern was not found in log lines: %s\"", "not in os.listdir(log_dir) assert len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines,", "= err.decode() all_output = out + err assert \"Starting compile\"", "out, err, code = do_kill(process, killtime=3, termtime=1) print(out, err) assert", "colored characters in TTY-based terminal into uncolored characters. 
As such,", "(./main.cf:8)) \"\"\" ) def exec(*cmd): process = do_run([sys.executable, \"-m\", \"inmanta.app\"]", "(True, True, [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\*", "from inmanta import const def get_command( tmp_dir, stdout_log_level=None, log_file=None, log_level_log_file=None,", "stdout assert code == 4 def test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet( \"\"\" entity", "args += [\"--log-file\", log_file] if log_file and log_level_log_file: args +=", "= process.communicate(timeout=30) assert out.decode() == \"\" if \"-X\" in cmd:", "log_level, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is", "= ( \"\"\"Could not set attribute `attr` on instance `__config__::Test", "_) = get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir) (stdout, _, _) = run_without_tty(args)", "stable in docker, so we are faking it \"\"\" env", "Rest Endpoint\"], [], ), ( 2, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting", "Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"], []), (True,", "stdout_log_level: args.append(\"-\" + \"v\" * stdout_log_level) if log_file: log_file =", "stderr = convert_to_ascii(err) return (stdout, stderr, process.returncode) def run_without_tty(args, env={},", "out def test_check_bad_shutdown(): print([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) process = do_run([sys.executable,", "Dump ----\" in out assert \"STOP\" in out assert \"SHUTDOWN", "def is_colorama_package_available(): try: import colorama # noqa: F401 except ModuleNotFoundError:", "err = err.decode() all_output = out + err assert \"Starting", "if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\") log_file = \"server.log\"", "timed: args += [\"--timed-logs\"] if config_dir: args += [\"--config-dir\", 
config_dir]", "[([\"-X\", \"compile\"]), ([\"compile\", \"-X\"]), ([\"compile\"]), ([\"export\", \"-X\"]), ([\"-X\", \"export\"]), ([\"export\"])]", "import sys from subprocess import TimeoutExpired from threading import Timer", "test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir = os.path.join(tmpdir, \"non_existing_dir\") assert not os.path.isdir(non_existing_dir) (args, _)", "Test: number attr end \"\"\" ) config_options = [\"-c\", non_existing_config_file,", "\"\"\"Could not get code for actual tty to run stable", "regexes_required_lines, regexes_forbidden_lines\", [ (False, True, [r\"Inmanta Service Orchestrator\", r\"Compiler version:", "Contact: <EMAIL> \"\"\" import os import re import signal import", "pattern found in log lines: %s\" % (regex.pattern,)) def test_check_shutdown():", "\"bad\"]) out, err, code = do_kill(process, killtime=5, termtime=2) print(out, err)", "endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], [], ), ( 2,", "[], ), ( 2, False, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[", "<EMAIL> \"\"\" import os import re import signal import subprocess", "\"\"\" entity Test: nuber attr end \"\"\" ) process =", "applicable law or agreed to in writing, software distributed under", "out assert \"SHUTDOWN COMPLETE\" in out def test_check_bad_shutdown(): print([sys.executable, os.path.join(os.path.dirname(__file__),", "\"non_existing_dir\") assert not os.path.isdir(non_existing_dir) (args, _) = get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir)", "), ( 2, False, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting", "actual tty to run stable in docker, so we are", "Rest Endpoint\"], ), ], ) @pytest.mark.timeout(20) def test_no_log_file_set(tmpdir, log_level, timed,", "assert \"Compile done\" in all_output assert f\"Config file {non_existing_config_file} doesn't", "def 
do_kill(process, killtime=3, termtime=2): def do_and_log(func, msg): def w(): print(msg)", "def test_minus_x_option(snippetcompiler, cmd): snippetcompiler.setup_for_snippet( \"\"\" entity Test: nuber attr end", "Number (reported in Construct(Test) (./main.cf:8)) \"\"\" ) def exec(*cmd): process", "(args, log_dir) = get_command(tmpdir, version=version_should_be_shown) if with_tty: (stdout, _, _)", "\\x1b\\[34mStarting Server Rest Endpoint\", ], [], ), ( 2, False,", "= get_timestamp_regex() + \" \" + regex compiled_regex = re.compile(regex)", "Endpoint\", ], [], ), ( 2, True, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server", "[], regexes_required_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=False) def check_logs(log_lines, regexes_required_lines,", "for regex in regexes: if timed: regex = get_timestamp_regex() +", "(stdout, _, _) = run_without_tty(args) stdout = \"\".join(stdout) assert \"Starting", "]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ), (", "\"server\"] return (args, log_dir) def do_run(args, env={}, cwd=None): baseenv =", "regexes_required_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=False) def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines,", "baseenv = os.environ.copy() baseenv.update(env) process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,", "OF ANY KIND, either express or implied. See the License", "= do_kill(process, killtime=5, termtime=2) print(out, err) assert code == 3", "], [], ), ( 2, False, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting", "(stdout, _, _) = run_with_tty(args) else: (stdout, _, _) =", "Server Rest Endpoint\"], [], ), ( 2, False, False, [r\"[a-z.]*[", "should not be present. 
\"\"\" assert not is_colorama_package_available() @pytest.mark.parametrize_any( \"log_level,", "convert_to_ascii(out) stderr = convert_to_ascii(err) return (stdout, stderr, process.returncode) def run_without_tty(args,", "args = [sys.executable, \"-m\", \"inmanta.app\"] + config_options + [\"compile\"] process", "r\"Extensions:\", r\"\\s*\\* core:\"]), ], ) @pytest.mark.timeout(20) def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown,", "\"init\", \"-n\", \"test-project\", \"-o\", tmpdir, \"--default\"] (stdout, stderr, return_code) =", "line in log_lines): pytest.fail(\"Required pattern was not found in log", "process.communicate(timeout=30) assert process.returncode == 0 out = out.decode() err =", "try: import colorama # noqa: F401 except ModuleNotFoundError: return False", "Server Rest Endpoint\"], ), ], ) @pytest.mark.timeout(60) def test_log_file_set(tmpdir, log_level,", "Test: nuber attr end \"\"\" ) process = do_run([sys.executable, \"-m\",", "o = Test(attr=\"1234\") \"\"\" ) output = ( \"\"\"Could not", "and with_tty: pytest.skip(\"Colorama is present\") (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level,", "= run_without_tty(args, env={\"PYTHONPATH\": pp + \":\" + extrapath}, killtime=15, termtime=10)", "log_dir + \"\\n\") f.write(\"state-dir=\" + state_dir + \"\\n\") f.write(\"[database]\\n\") f.write(\"port=\"", "3, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server", "import subprocess import sys from subprocess import TimeoutExpired from threading", "@pytest.mark.parametrize_any( \"log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False,", "\"STOP\" in out assert \"SHUTDOWN COMPLETE\" in out def test_check_bad_shutdown():", "get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir) (stdout, _, _) = run_without_tty(args) stdout =", "killtime=15, termtime=10) test_project_path = os.path.join(tmpdir, \"test-project\") 
assert return_code == 0", "\"log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False, False,", "if dbport is not None: port = dbport else: port", "state_dir = os.path.join(root_dir, \"data\") for directory in [log_dir, state_dir]: os.mkdir(directory)", "_) = run_without_tty(args) assert log_file in os.listdir(log_dir) log_file = os.path.join(log_dir,", "regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\") log_file =", "compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed) for line in log_lines: print(line) for", "is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\") log_file = \"server.log\" (args,", "\" + regex compiled_regex = re.compile(regex) result.append(compiled_regex) return result def", "attr end \"\"\" ) config_options = [\"-c\", non_existing_config_file, \"-vvv\"] args", "Apache License, Version 2.0 (the \"License\"); you may not use", "timed=True) check_logs(stdout, [], regexes_required_lines, timed=False) def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed):", "assert return_code == 0 assert os.path.exists(test_project_path) (stdout, stderr, return_code) =", "std::none o = Test(attr=\"1234\") \"\"\" ) output = ( \"\"\"Could", "failed\" in stdout assert ( \"inmanta.server.protocol.SliceStartupException: \" \"Slice badplugin.badslice failed", "implement Test using std::none o = Test(attr=\"1234\") \"\"\" ) output", "_, _) = run_without_tty(args) log_file = \"server.log\" assert log_file not", "exist\" in stdout @pytest.mark.timeout(20) def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir): non_existing_config_file = os.path.join(tmpdir,", "version=False, ): root_dir = tmp_dir.mkdir(\"root\").strpath log_dir = os.path.join(root_dir, \"log\") state_dir", "timed, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ ( 3, False, False, 
[r\"[a-z.]*[", "cmd, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert out.decode() == \"\"", "log_level_log_file=log_level) if with_tty: (stdout, _, _) = run_with_tty(args) else: (stdout,", "As such, this package should not be present. \"\"\" assert", "= get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level) if with_tty: (stdout, _, _)", "Server Rest Endpoint\", ], [], ), ( 2, False, [r\"[a-z.]*[", "if with_tty: (stdout, _, _) = run_with_tty(args) else: (stdout, _,", "cmd): snippetcompiler.setup_for_snippet( \"\"\" entity Test: nuber attr end \"\"\" )", "import re import signal import subprocess import sys from subprocess", "r\"\\s*\\* core:\"], []), (True, True, [r\"Inmanta Service Orchestrator\", r\"Compiler version:", "+ log_dir + \"\\n\") f.write(\"state-dir=\" + state_dir + \"\\n\") f.write(\"[database]\\n\")", "test_init_project(tmpdir): args = [sys.executable, \"-m\", \"inmanta.app\", \"project\", \"init\", \"-n\", \"test-project\",", "@pytest.mark.timeout(20) def test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir = os.path.join(tmpdir, \"non_existing_dir\") assert not os.path.isdir(non_existing_dir)", "+ [\"compile\"] process = do_run(args, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30)", "\"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines\", [ (False, True, [r\"Inmanta Service Orchestrator\",", "(stdout, _, _) = run_without_tty(args) log_file = \"server.log\" assert log_file", "endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ), ], )", "dbpass: f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\") f.write(f\"enabled_extensions={', '.join(server_extensions)}\\n\") args = [sys.executable, \"-m\", \"inmanta.app\"]", "server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ), (", 
"failed to start because: Too bad, this plugin is broken\"", "\" \" + regex compiled_regex = re.compile(regex) result.append(compiled_regex) return result", "as f: f.write(\"[config]\\n\") f.write(\"log-dir=\" + log_dir + \"\\n\") f.write(\"state-dir=\" +", "subprocess import TimeoutExpired from threading import Timer import pytest import", "log_dir) = get_command(tmpdir, stdout_log_level=log_level, timed=timed) if with_tty: (stdout, _, _)", "func() return w t1 = Timer(killtime, do_and_log(process.kill, \"killed process\")) t2", "os.path.join(tmpdir, \"non_existing_dir\") assert not os.path.isdir(non_existing_dir) (args, _) = get_command(tmpdir, stdout_log_level=3,", "directory in [log_dir, state_dir]: os.mkdir(directory) config_file = os.path.join(root_dir, \"inmanta.cfg\") if", "termtime=2): \"\"\"Could not get code for actual tty to run", "in log lines: %s\" % (regex.pattern,)) for regex in compiled_regexes_forbidden_lines:", "run_with_tty(args, killtime=3, termtime=2): \"\"\"Could not get code for actual tty", "[sys.executable, \"-m\", \"inmanta.app\"] + config_options + [\"compile\"] process = do_run(args,", "dbhost=postgres_db.host, dbuser=postgres_db.user, dbpass=<PASSWORD>, server_extensions=[\"badplugin\"], ) pp = \":\".join(sys.path) # Add", "f.write(\"[config]\\n\") f.write(\"log-dir=\" + log_dir + \"\\n\") f.write(\"state-dir=\" + state_dir +", "found in log lines: %s\" % (regex.pattern,)) def test_check_shutdown(): process", "f.write(\"[server]\\n\") f.write(f\"enabled_extensions={', '.join(server_extensions)}\\n\") args = [sys.executable, \"-m\", \"inmanta.app\"] if stdout_log_level:", "[], ), ( 2, True, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[", "test_check_bad_shutdown(): print([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"),", "namespace\" in str(err) else: assert 
\"inmanta.ast.TypeNotFoundException: could not find type", "log_level_log_file: args += [\"--log-file-level\", str(log_level_log_file)] if timed: args += [\"--timed-logs\"]", "process = do_run([sys.executable, \"-m\", \"inmanta.app\"] + list(cmd), cwd=snippetcompiler.project_dir) out, err", "pytest.fail(\"Required pattern was not found in log lines: %s\" %", "out assert \"STOP\" not in out assert \"SHUTDOWN COMPLETE\" not", "test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr end implement Test", "def test_check_bad_shutdown(): print([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) process = do_run([sys.executable, os.path.join(os.path.dirname(__file__),", "assert \"inmanta ERROR Server setup failed\" in stdout assert (", "core:\"]), (True, False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler version: \",", "run_without_tty(args, env=env, killtime=killtime, termtime=termtime) def get_timestamp_regex(): return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def", "= f.readlines() check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=True)", "@pytest.mark.timeout(60) def test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and", "err) assert code == 0 assert \"----- Thread Dump ----\"", "True, [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"],", "[r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), (True,", "wait for handler to be in place try: process.communicate(timeout=2) except", "Thread Dump ----\" in out assert \"STOP\" not in out", "\":\".join(sys.path) # Add a bad module extrapath = os.path.join(os.path.dirname(__file__), \"data\",", "= convert_to_ascii(out) stderr = 
convert_to_ascii(err) return (stdout, stderr, process.returncode) def", "run stable in docker, so we are faking it \"\"\"", "\"-n\", \"test-project\", \"-o\", tmpdir, \"--default\"] (stdout, stderr, return_code) = run_without_tty(args,", "if dbpass: f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\") f.write(f\"enabled_extensions={', '.join(server_extensions)}\\n\") args = [sys.executable, \"-m\",", "err = process.communicate(timeout=30) assert process.returncode == 0 out = out.decode()", "faking it \"\"\" env = {const.ENVIRON_FORCE_TTY: \"true\"} return run_without_tty(args, env=env,", "find type nuber in namespace\" not in str(err) @pytest.mark.timeout(20) def", "+ config_options + [\"compile\"] process = do_run(args, cwd=snippetcompiler.project_dir) out, err", "str(port) + \"\\n\") f.write(\"name=\" + dbname + \"\\n\") if dbhost:", "log_file = \"server.log\" assert log_file not in os.listdir(log_dir) assert len(stdout)", "Rest Endpoint\", ], [], ), ( 2, False, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting", "assert not err def test_startup_failure(tmpdir, postgres_db, database_name): (args, log_dir) =", "software distributed under the License is distributed on an \"AS", "tmpdir): non_existing_config_file = os.path.join(tmpdir, \"non_existing_config_file\") snippetcompiler.setup_for_snippet( \"\"\" entity Test: number", "= os.path.join(tmpdir, \"non_existing_config_file\") snippetcompiler.setup_for_snippet( \"\"\" entity Test: number attr end", "Server Rest Endpoint\"], ), ( 3, True, True, [ r\"\\x1b\\[32m[a-z.]*[", "\"\"\" ) output = ( \"\"\"Could not set attribute `attr`", "+ \"\\n\") if dbhost: f.write(f\"host={dbhost}\\n\") if dbuser: f.write(f\"username={dbuser}\\n\") if dbpass:", "not any(regex.match(line) for line in log_lines): pytest.fail(\"Required pattern was not", "\"miniapp.py\"), \"bad\"]) process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) out, err,", "(stdout, stderr, return_code) = 
run_without_tty(args, killtime=15, termtime=10) assert return_code !=", "!= 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False) def test_init_project(tmpdir): args =", "= get_command(tmpdir, version=version_should_be_shown) if with_tty: (stdout, _, _) = run_with_tty(args,", "return run_without_tty(args, env=env, killtime=killtime, termtime=termtime) def get_timestamp_regex(): return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\"", "not in str(err) @pytest.mark.timeout(20) def test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir = os.path.join(tmpdir, \"non_existing_dir\")", "in stdout assert code == 4 def test_compiler_exception_output(snippetcompiler): snippetcompiler.setup_for_snippet( \"\"\"", "print(line) for regex in compiled_regexes_requires_lines: if not any(regex.match(line) for line", "0 out = out.decode() err = err.decode() all_output = out", "TTY-based terminal into uncolored characters. As such, this package should", "from threading import Timer import pytest import inmanta.util from inmanta", "[], ), ( 2, True, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server", "import inmanta.util from inmanta import const def get_command( tmp_dir, stdout_log_level=None,", "[r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ), ( 3, False, True,", "line in text.decode(\"ascii\").split(\"\\n\") if line != \"\"] def do_kill(process, killtime=3,", "dbpass=<PASSWORD>, server_extensions=[\"badplugin\"], ) pp = \":\".join(sys.path) # Add a bad", "get_command(tmpdir, stdout_log_level=log_level, timed=timed) if with_tty: (stdout, _, _) = run_with_tty(args)", "\"inmanta.app\"] + list(cmd), cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert out.decode()", "\"true\"} return run_without_tty(args, env=env, killtime=killtime, termtime=termtime) def get_timestamp_regex(): return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2}", "), ], ) 
@pytest.mark.timeout(20) def test_no_log_file_set(tmpdir, log_level, timed, with_tty, regexes_required_lines,", "endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), ], ) @pytest.mark.timeout(60)", "assert os.path.exists(test_project_path) (stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10) assert", "get_timestamp_regex() + \" \" + regex compiled_regex = re.compile(regex) result.append(compiled_regex)", "process.returncode) def run_without_tty(args, env={}, killtime=3, termtime=2): process = do_run(args, env)", "colorama package turns the colored characters in TTY-based terminal into", "False, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m", "\", r\"Extensions:\", r\"\\s*\\* core:\"], []), (False, False, [], [r\"Inmanta Service", "\", r\"Extensions:\", r\"\\s*\\* core:\"]), (True, False, [], [r\"Inmanta Service Orchestrator\",", "pytest.skip(\"Colorama is present\") (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, timed=timed) if", "in log lines: %s\" % (regex.pattern,)) def test_check_shutdown(): process =", "at ./main.cf:8)` \"\"\" \"\"\"(reported in Construct(Test) (./main.cf:8)) caused by: Invalid", "server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ), ],", "pattern was not found in log lines: %s\" % (regex.pattern,))", "* stdout_log_level) if log_file: log_file = os.path.join(log_dir, log_file) args +=", "[r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ), ( 3, True, True,", "do_kill(process, killtime=3, termtime=2): def do_and_log(func, msg): def w(): print(msg) func()", "server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\", ], [], ),", "is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\") (args, log_dir) = get_command(tmpdir,", "to in 
writing, software distributed under the License is distributed", "server endpoint\" in stdout assert f\"Config directory {non_existing_dir} doesn't exist\"", "do_and_log(func, msg): def w(): print(msg) func() return w t1 =", "regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\") log_file", "f.write(\"name=\" + dbname + \"\\n\") if dbhost: f.write(f\"host={dbhost}\\n\") if dbuser:", "if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is present\") (args, log_dir) =", "else: (stdout, _, _) = run_without_tty(args) assert log_file in os.listdir(log_dir)", "version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), ], ) @pytest.mark.timeout(20) def test_version_argument_is_set(tmpdir,", "to run stable in docker, so we are faking it", "attribute `attr` on instance `__config__::Test (instantiated at ./main.cf:8)` \"\"\" \"\"\"(reported", "assert \"----- Thread Dump ----\" in out assert \"STOP\" in", "bad module extrapath = os.path.join(os.path.dirname(__file__), \"data\", \"bad_module_path\") (stdout, stderr, code)", "log_dir) = get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level) if with_tty: (stdout, _,", "for the specific language governing permissions and limitations under the", "\"\"\" ) process = do_run([sys.executable, \"-m\", \"inmanta.app\"] + cmd, cwd=snippetcompiler.project_dir)", "\"w+\", encoding=\"utf-8\") as f: f.write(\"[config]\\n\") f.write(\"log-dir=\" + log_dir + \"\\n\")", "timed) for line in log_lines: print(line) for regex in compiled_regexes_requires_lines:", "number attr end \"\"\" ) config_options = [\"-c\", non_existing_config_file, \"-vvv\"]", "in stdout assert f\"Config directory {non_existing_dir} doesn't exist\" in stdout", "]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ), ( 3, True, True, [", "\"\"\" ) config_options = [\"-c\", non_existing_config_file, \"-vvv\"] args = [sys.executable,", "out, err = 
process.communicate(timeout=30) assert process.returncode == 0 out =", "log_file=log_file, log_level_log_file=log_level) if with_tty: (stdout, _, _) = run_with_tty(args) else:", "= dbport else: port = inmanta.util.get_free_tcp_port() with open(config_file, \"w+\", encoding=\"utf-8\")", "([\"export\"])] ) def test_minus_x_option(snippetcompiler, cmd): snippetcompiler.setup_for_snippet( \"\"\" entity Test: nuber", "core:\"]), ], ) @pytest.mark.timeout(20) def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines):", "assert \"SHUTDOWN COMPLETE\" in out def test_check_bad_shutdown(): print([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"),", "check_logs(stdout, [], regexes_required_lines, timed=False) def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed): compiled_regexes_requires_lines", "\"terminated process\")) t1.start() t2.start() out, err = process.communicate() t1.cancel() t2.cancel()", "log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama", "compile\" in all_output assert \"Compile done\" in all_output assert f\"Config", "\"Starting server endpoint\" in stdout assert f\"Config directory {non_existing_dir} doesn't", "pass process.send_signal(signal.SIGUSR1) out, err, code = do_kill(process, killtime=3, termtime=1) print(out,", "def run_without_tty(args, env={}, killtime=3, termtime=2): process = do_run(args, env) return", "in docker, so we are faking it \"\"\" env =", "\"test-project\", \"-o\", tmpdir, \"--default\"] (stdout, stderr, return_code) = run_without_tty(args, killtime=15,", "\"export\"]), ([\"export\"])] ) def test_minus_x_option(snippetcompiler, cmd): snippetcompiler.setup_for_snippet( \"\"\" entity Test:", "\"\"\" Copyright 2018 Inmanta Licensed under the Apache License, Version", "Invalid value '1234', expected Number (reported in 
Construct(Test) (./main.cf:8)) \"\"\"", "+ \":\" + extrapath}, killtime=15, termtime=10) assert \"inmanta ERROR Server", "stdout_log_level=log_level, timed=timed) if with_tty: (stdout, _, _) = run_with_tty(args) else:", "= run_without_tty(args) stdout = \"\".join(stdout) assert \"Starting server endpoint\" in", "config_dir: args += [\"--config-dir\", config_dir] if version: args += [\"--version\"]", "process def convert_to_ascii(text): return [line for line in text.decode(\"ascii\").split(\"\\n\") if", "= run_with_tty(args) else: (stdout, _, _) = run_without_tty(args) assert log_file", "out, err = process.communicate(timeout=30) assert out.decode() == \"\" assert err.decode()", "env=env, killtime=killtime, termtime=termtime) def get_timestamp_regex(): return r\"[\\d]{4}\\-[\\d]{2}\\-[\\d]{2} [\\d]{2}\\:[\\d]{2}\\:[\\d]{2}\\,[\\d]{3}\" def get_compiled_regexes(regexes,", "run_without_tty(args) log_file = \"server.log\" assert log_file not in os.listdir(log_dir) assert", "timed, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available() and with_tty: pytest.skip(\"Colorama is", "dbname=\"inmanta\", dbhost=None, dbuser=None, dbpass=<PASSWORD>, config_dir=None, server_extensions=[], version=False, ): root_dir =", "[r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest", "killtime=15, termtime=10) assert return_code != 0 assert len(stderr) == 1", "endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), ( 3, True,", "run_without_tty(args, killtime=15, termtime=10) assert len(stdout) != 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines,", "with_tty: (stdout, _, _) = run_with_tty(args, killtime=15, termtime=10) else: (stdout,", "log_file in os.listdir(log_dir) log_file = os.path.join(log_dir, log_file) with open(log_file, \"r\")", "try: process.communicate(timeout=2) 
except TimeoutExpired: pass process.send_signal(signal.SIGUSR1) out, err, code =", "2018 Inmanta Licensed under the Apache License, Version 2.0 (the", "\"\\n\") f.write(\"name=\" + dbname + \"\\n\") if dbhost: f.write(f\"host={dbhost}\\n\") if", "assert out.decode() == \"\" if \"-X\" in cmd: assert \"inmanta.ast.TypeNotFoundException:", "stdout = convert_to_ascii(out) stderr = convert_to_ascii(err) return (stdout, stderr, process.returncode)", "Server Rest Endpoint\"], ), ( 3, False, True, [ r\"\\x1b\\[32m[a-z.]*[", "return process def convert_to_ascii(text): return [line for line in text.decode(\"ascii\").split(\"\\n\")", "]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), (", "]*DEBUG[\\s]+Starting Server Rest Endpoint\"], [], ), ( 2, False, False,", "[r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ),", "], ) @pytest.mark.timeout(20) def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines): (args,", "= \":\".join(sys.path) # Add a bad module extrapath = os.path.join(os.path.dirname(__file__),", "test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines): (args, log_dir) = get_command(tmpdir, version=version_should_be_shown)", "os.mkdir(directory) config_file = os.path.join(root_dir, \"inmanta.cfg\") if dbport is not None:", "License for the specific language governing permissions and limitations under", "output = ( \"\"\"Could not set attribute `attr` on instance", "process\")) t2 = Timer(termtime, do_and_log(process.terminate, \"terminated process\")) t1.start() t2.start() out,", "), ( 2, True, [r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting", "f.readlines() check_logs(log_lines, regexes_required_lines, 
regexes_forbidden_lines, timed=True) check_logs(stdout, [], regexes_required_lines, timed=True) check_logs(stdout,", "governing permissions and limitations under the License. Contact: <EMAIL> \"\"\"", "port = dbport else: port = inmanta.util.get_free_tcp_port() with open(config_file, \"w+\",", "(reported in Construct(Test) (./main.cf:8)) \"\"\" ) def exec(*cmd): process =", "r\"Extensions:\", r\"\\s*\\* core:\"], []), (False, False, [], [r\"Inmanta Service Orchestrator\",", "log_dir) = get_command(tmpdir, version=version_should_be_shown) if with_tty: (stdout, _, _) =", "= os.path.join(root_dir, \"data\") for directory in [log_dir, state_dir]: os.mkdir(directory) config_file", "( 2, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+Starting", "Timer(killtime, do_and_log(process.kill, \"killed process\")) t2 = Timer(termtime, do_and_log(process.terminate, \"terminated process\"))", "), ( 3, True, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server", "do_and_log(process.terminate, \"terminated process\")) t1.start() t2.start() out, err = process.communicate() t1.cancel()", "core:\"], []), (False, False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler version:", "timed=False) def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed): compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed)", "+ state_dir + \"\\n\") f.write(\"[database]\\n\") f.write(\"port=\" + str(port) + \"\\n\")", "3, False, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server", "is broken\" ) in stdout assert code == 4 def", "termtime=2): def do_and_log(func, msg): def w(): print(msg) func() return w", "\"bad\"]) process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\"), \"bad\"]) out, err, code", "[r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest 
Endpoint\"], ), ], ) @pytest.mark.timeout(60) def", "in out assert \"SHUTDOWN COMPLETE\" in out def test_check_bad_shutdown(): print([sys.executable,", "str(err) else: assert \"inmanta.ast.TypeNotFoundException: could not find type nuber in", "r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\", r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest", "regex = get_timestamp_regex() + \" \" + regex compiled_regex =", "Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]), ], )", "present\") log_file = \"server.log\" (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, log_file=log_file,", "(regex.pattern,)) def test_check_shutdown(): process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), \"miniapp.py\")]) # wait", "( 3, False, True, [ r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server endpoint\",", "]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), ], ) @pytest.mark.timeout(60) def test_log_file_set(tmpdir,", "in out assert \"STOP\" not in out assert \"SHUTDOWN COMPLETE\"", "stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv) return process def convert_to_ascii(text): return [line for", "run_without_tty(args, env={}, killtime=3, termtime=2): process = do_run(args, env) return do_kill(process,", "log_lines): pytest.fail(\"Forbidden pattern found in log lines: %s\" % (regex.pattern,))", "[r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"], []),", "terminal into uncolored characters. 
As such, this package should not", "if line != \"\"] def do_kill(process, killtime=3, termtime=2): def do_and_log(func,", "r\"Extensions:\", r\"\\s*\\* core:\"]), (True, False, [], [r\"Inmanta Service Orchestrator\", r\"Compiler", "if dbhost: f.write(f\"host={dbhost}\\n\") if dbuser: f.write(f\"username={dbuser}\\n\") if dbpass: f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\")", "def exec(*cmd): process = do_run([sys.executable, \"-m\", \"inmanta.app\"] + list(cmd), cwd=snippetcompiler.project_dir)", "]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\", ], [], ), ( 2, True,", "\"\"\"(reported in Construct(Test) (./main.cf:8)) caused by: Invalid value '1234', expected", "namespace\" not in str(err) @pytest.mark.timeout(20) def test_warning_config_dir_option_on_server_command(tmpdir): non_existing_dir = os.path.join(tmpdir,", "err, code = do_kill(process, killtime=5, termtime=2) print(out, err) assert code", "it \"\"\" env = {const.ENVIRON_FORCE_TTY: \"true\"} return run_without_tty(args, env=env, killtime=killtime,", "assert f\"Config file {non_existing_config_file} doesn't exist\" in all_output @pytest.mark.parametrize_any( \"with_tty,", "= subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv) return process def convert_to_ascii(text):", "]*DEBUG[\\s]+Starting Server Rest Endpoint\"], ), ( 3, False, True, [", "[r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ), ], ) @pytest.mark.timeout(20)", "= re.compile(regex) result.append(compiled_regex) return result def is_colorama_package_available(): try: import colorama", "not is_colorama_package_available() @pytest.mark.parametrize_any( \"log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines\", [ (", "by applicable law or agreed to in writing, software distributed", "Timer import pytest import inmanta.util from inmanta import const def", "package should not be present. 
\"\"\" assert not is_colorama_package_available() @pytest.mark.parametrize_any(", "\"inmanta.server.protocol.SliceStartupException: \" \"Slice badplugin.badslice failed to start because: Too bad,", "[], [r\"Inmanta Service Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"]),", "present. \"\"\" assert not is_colorama_package_available() @pytest.mark.parametrize_any( \"log_level, timed, with_tty, regexes_required_lines,", "re import signal import subprocess import sys from subprocess import", "config_file = os.path.join(root_dir, \"inmanta.cfg\") if dbport is not None: port", "import Timer import pytest import inmanta.util from inmanta import const", "!= 0 check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed) @pytest.mark.parametrize_any( \"log_level, with_tty, regexes_required_lines,", "dbhost: f.write(f\"host={dbhost}\\n\") if dbuser: f.write(f\"username={dbuser}\\n\") if dbpass: f.write(f\"password={<PASSWORD>\") f.write(\"[server]\\n\") f.write(f\"enabled_extensions={',", "= run_without_tty(args) log_file = \"server.log\" assert log_file not in os.listdir(log_dir)", ") @pytest.mark.timeout(20) def test_no_log_file_set(tmpdir, log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines): if", "= os.path.join(tmpdir, \"test-project\") assert return_code == 0 assert os.path.exists(test_project_path) (stdout,", "run_without_tty(args, env={\"PYTHONPATH\": pp + \":\" + extrapath}, killtime=15, termtime=10) assert", "cwd=None): baseenv = os.environ.copy() baseenv.update(env) process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE,", "Endpoint\"], ), ( 3, True, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\",", "os import re import signal import subprocess import sys from", "# wait for handler to be in place try: process.communicate(timeout=2)", "and log_level_log_file: args += [\"--log-file-level\", str(log_level_log_file)] if timed: args +=", "), ( 3, False, True, [ 
r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m \\x1b\\[34mStarting server", "'.join(server_extensions)}\\n\") args = [sys.executable, \"-m\", \"inmanta.app\"] if stdout_log_level: args.append(\"-\" +", "code = do_kill(process, killtime=5, termtime=2) print(out, err) assert code ==", "be present. \"\"\" assert not is_colorama_package_available() @pytest.mark.parametrize_any( \"log_level, timed, with_tty,", "), ( 3, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\", r\"[a-z.]*[", "err) assert code == 3 assert \"----- Thread Dump ----\"", "is present\") (args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, timed=timed) if with_tty:", "os.path.isdir(non_existing_dir) (args, _) = get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir) (stdout, _, _)", "COMPLETE\" not in out assert not err def test_startup_failure(tmpdir, postgres_db,", "+= [\"--version\"] args += [\"-c\", config_file, \"server\"] return (args, log_dir)", "= {const.ENVIRON_FORCE_TTY: \"true\"} return run_without_tty(args, env=env, killtime=killtime, termtime=termtime) def get_timestamp_regex():", "as f: log_lines = f.readlines() check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed=True) check_logs(stdout,", "with the License. 
You may obtain a copy of the", "for line in text.decode(\"ascii\").split(\"\\n\") if line != \"\"] def do_kill(process,", "in Construct(Test) (./main.cf:8)) \"\"\" ) def exec(*cmd): process = do_run([sys.executable,", "code == 3 assert \"----- Thread Dump ----\" in out", "not None: port = dbport else: port = inmanta.util.get_free_tcp_port() with", "const def get_command( tmp_dir, stdout_log_level=None, log_file=None, log_level_log_file=None, timed=False, dbport=None, dbname=\"inmanta\",", "f\"Config directory {non_existing_dir} doesn't exist\" in stdout @pytest.mark.timeout(20) def test_warning_min_c_option_file_doesnt_exist(snippetcompiler,", "do_run([sys.executable, \"-m\", \"inmanta.app\"] + cmd, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30)", "%s\" % (regex.pattern,)) for regex in compiled_regexes_forbidden_lines: if any(regex.match(line) for", "r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\", ], [], ), (", "t1.cancel() t2.cancel() stdout = convert_to_ascii(out) stderr = convert_to_ascii(err) return (stdout,", "the colored characters in TTY-based terminal into uncolored characters. 
As", "process.communicate(timeout=30) assert out.decode() == \"\" if \"-X\" in cmd: assert", "log_file: log_file = os.path.join(log_dir, log_file) args += [\"--log-file\", log_file] if", "re.compile(regex) result.append(compiled_regex) return result def is_colorama_package_available(): try: import colorama #", "( 3, True, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\", r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting", "ERROR Server setup failed\" in stdout assert ( \"inmanta.server.protocol.SliceStartupException: \"", "regexes_forbidden_lines\", [ (False, True, [r\"Inmanta Service Orchestrator\", r\"Compiler version: \",", "process.communicate() t1.cancel() t2.cancel() stdout = convert_to_ascii(out) stderr = convert_to_ascii(err) return", ") @pytest.mark.timeout(60) def test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines, regexes_forbidden_lines): if is_colorama_package_available()", "get code for actual tty to run stable in docker,", "err.decode() == output exec(\"compile\") exec(\"export\", \"-J\", \"out.json\") @pytest.mark.timeout(15) @pytest.mark.parametrize_any( \"cmd\",", "characters in TTY-based terminal into uncolored characters. 
As such, this", "[r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ), ( 3, True,", "]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting server endpoint\"], [r\"[a-z.]*[ ]*DEBUG[\\s]+[a-x\\.A-Z]*[\\s]Starting Server Rest Endpoint\"], ), ],", "timed): compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed) compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed) for", "all_output assert \"Compile done\" in all_output assert f\"Config file {non_existing_config_file}", "Endpoint\", ], [], ), ( 2, True, True, [r\"\\x1b\\[32m[a-z.]*[ ]*INFO[\\s]*\\x1b\\[0m", "Orchestrator\", r\"Compiler version: \", r\"Extensions:\", r\"\\s*\\* core:\"], []), (True, True,", "Server Rest Endpoint\"], ), ( 3, True, [ r\"[a-z.]*[ ]*INFO[\\s]+[a-x\\.A-Z]*[\\s]Starting", "\"out.json\") @pytest.mark.timeout(15) @pytest.mark.parametrize_any( \"cmd\", [([\"-X\", \"compile\"]), ([\"compile\", \"-X\"]), ([\"compile\"]), ([\"export\",", "[], ), ( 2, False, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server endpoint\"],", "run_without_tty(args, killtime=15, termtime=10) assert return_code != 0 assert len(stderr) ==", "Rest Endpoint\"], ), ( 3, True, False, [r\"[a-z.]*[ ]*INFO[\\s]+Starting server", "except ModuleNotFoundError: return False return True def test_verify_that_colorama_package_is_not_present(): \"\"\" The", "= os.path.join(root_dir, \"log\") state_dir = os.path.join(root_dir, \"data\") for directory in", "endpoint\"], [r\"\\x1b\\[36m[a-z.]*[ ]*DEBUG[\\s]*\\x1b\\[0m \\x1b\\[34mStarting Server Rest Endpoint\"], ), ( 3,", "def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed): compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed) compiled_regexes_forbidden_lines", "do_kill(process, killtime=5, termtime=2) print(out, err) assert code == 3 assert", "else: assert \"inmanta.ast.TypeNotFoundException: could not find type nuber in namespace\"", 
"License. You may obtain a copy of the License at", "w t1 = Timer(killtime, do_and_log(process.kill, \"killed process\")) t2 = Timer(termtime,", "regexes_required_lines, regexes_forbidden_lines, False) def test_init_project(tmpdir): args = [sys.executable, \"-m\", \"inmanta.app\",", "\"\"\" The colorama package turns the colored characters in TTY-based", "_, _) = run_without_tty(args, killtime=15, termtime=10) assert len(stdout) != 0", "get_command( tmp_dir, stdout_log_level=None, log_file=None, log_level_log_file=None, timed=False, dbport=None, dbname=\"inmanta\", dbhost=None, dbuser=None,", "[\"compile\"] process = do_run(args, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert", "run_with_tty(args) else: (stdout, _, _) = run_without_tty(args) log_file = \"server.log\"", "result = [] for regex in regexes: if timed: regex", "return_code != 0 assert len(stderr) == 1 assert \"already exists\"", "def test_startup_failure(tmpdir, postgres_db, database_name): (args, log_dir) = get_command( tmpdir, dbport=postgres_db.port,", "do_run(args, cwd=snippetcompiler.project_dir) out, err = process.communicate(timeout=30) assert process.returncode == 0" ]
[ "= 0 for i in range(100): if 2 ** i", "seki(k, row // 2) if n < cri: print(\"Aoki\") else:", "row = 0 for i in range(100): if 2 **", "k = 1 cri = seki(k, row // 2) if", "// 2) if n < cri: print(\"Aoki\") else: print(\"Takahashi\") else:", "= i break def seki(k, n): for _ in range(n):", "for i in range(100): if 2 ** i <= n", "1: row = i break def seki(k, n): for _", "1) - 1: row = i break def seki(k, n):", "cri = seki(k, row // 2) if n < cri:", "= 0 if row % 2 != 0: k =", "< cri: print(\"Aoki\") else: print(\"Takahashi\") else: k = 1 cri", "= 1 cri = seki(k, row // 2) if n", "1 cri = seki(k, row // 2) if n <", "if 2 ** i <= n <= 2 ** (i", "2 cri = seki(k, row // 2) if n <", "2 ** i <= n <= 2 ** (i +", "range(n): k = 4 * k + 2 return k", "return k k = 0 if row % 2 !=", "in range(n): k = 4 * k + 2 return", "if n < cri: print(\"Aoki\") else: print(\"Takahashi\") else: k =", "!= 0: k = 2 cri = seki(k, row //", "2 != 0: k = 2 cri = seki(k, row", "if row % 2 != 0: k = 2 cri", "= seki(k, row // 2) if n < cri: print(\"Aoki\")", "int(input()) row = 0 for i in range(100): if 2", "else: k = 1 cri = seki(k, row // 2)", "<= 2 ** (i + 1) - 1: row =", "* k + 2 return k k = 0 if", "k = 2 cri = seki(k, row // 2) if", "i in range(100): if 2 ** i <= n <=", "2 return k k = 0 if row % 2", "seki(k, row // 2) if n < cri: print(\"Takahashi\") else:", "i break def seki(k, n): for _ in range(n): k", "row = i break def seki(k, n): for _ in", "(i + 1) - 1: row = i break def", "2 ** (i + 1) - 1: row = i", "_ in range(n): k = 4 * k + 2", "- 1: row = i break def seki(k, n): for", "k = 0 if row % 2 != 0: k", "in range(100): if 2 ** i <= n <= 2", "print(\"Aoki\") else: print(\"Takahashi\") else: k = 1 cri = seki(k,", "** i <= n <= 2 ** (i + 1)", "k + 2 return k k = 0 if row", "cri: print(\"Aoki\") else: print(\"Takahashi\") else: k = 1 cri =", "k k = 0 if row % 2 != 0:", "n <= 2 ** (i + 1) - 1: row", "def seki(k, n): for _ in range(n): k = 4", "0 if row % 2 != 0: k = 2", "break def seki(k, n): 
for _ in range(n): k =", "= 2 cri = seki(k, row // 2) if n", "range(100): if 2 ** i <= n <= 2 **", "row // 2) if n < cri: print(\"Takahashi\") else: print(\"Aoki\")", "** (i + 1) - 1: row = i break", "for _ in range(n): k = 4 * k +", "0: k = 2 cri = seki(k, row // 2)", "4 * k + 2 return k k = 0", "else: print(\"Takahashi\") else: k = 1 cri = seki(k, row", "i <= n <= 2 ** (i + 1) -", "<= n <= 2 ** (i + 1) - 1:", "= 4 * k + 2 return k k =", "0 for i in range(100): if 2 ** i <=", "n): for _ in range(n): k = 4 * k", "= int(input()) row = 0 for i in range(100): if", "+ 2 return k k = 0 if row %", "+ 1) - 1: row = i break def seki(k,", "% 2 != 0: k = 2 cri = seki(k,", "n = int(input()) row = 0 for i in range(100):", "row // 2) if n < cri: print(\"Aoki\") else: print(\"Takahashi\")", "k = 4 * k + 2 return k k", "n < cri: print(\"Aoki\") else: print(\"Takahashi\") else: k = 1", "print(\"Takahashi\") else: k = 1 cri = seki(k, row //", "= seki(k, row // 2) if n < cri: print(\"Takahashi\")", "row % 2 != 0: k = 2 cri =", "seki(k, n): for _ in range(n): k = 4 *", "2) if n < cri: print(\"Aoki\") else: print(\"Takahashi\") else: k" ]
[ "[] # Amount of players who voted yes. Checked against", "in progress \"\"\" # Check if there is a vote", "= self.console.getPlugin('admin') if not self.adminPlugin: self.error('Could not find admin plugin')", "kicked if vote passed _default_messages = { 'tovote': '^7Use ^2!yes", "maps matching your request') return False ############### NEXTMAP FUNCTIONING ################", "%s, Value: %s' % (self._vote, self._value)) self._vote = None self._value", "vote playersInGame = 0 self._allplayers = [] for c in", "= {'guest': 0, 'user': 1, 'reg': 2, 'mod': 20, 'admin':", "self.confirmFriendlyFire() elif self._vote == 'killcam': self.confirmKillCam() elif self._vote == 'scorelimit':", "confirmVote(self): self.console.say('^3Vote passed!^7') if self._vote == 'map': self.confirmMap() elif self._vote", "= len(self._amt_no) vPass = len(self._allplayers) / 2 if vNo >", "cmd): cmd = 'cmd_%s' % cmd if hasattr(self, cmd): func", "self.confirmRoundLimit() else: self.error('Unable to commit. Vote: %s, Value: %s' %", "current vote \"\"\" # Check if there is a vote", "Fixed issue where person who called vote needed to vote", "else: self.console.rotateMap() elif self._vote == 'maprestart': self.confirmMaprestart() elif self._vote ==", "of players in game _amt_no = [] _allplayers = []", "client, delay, all=True): if client.maxLevel >= 20: return True elif", "be canceled for not passing _aVotes = {} # All", "1, 'reg': 2, 'mod': 20, 'admin': 40, 'fulladmin': 60, 'senioradmin':", "1 self._allplayers.insert(0, c) if playersInGame <= 1 and client.maxLevel <", "self.confirmMap() self._mapRequested = None ############### CONFIRM VOTES ###################### def confirmVote(self):", "int(setting) except ValueError: self.debug('ERROR: Could not set new round length.", "player is asking what maps can be voted on if", "= self._value if not isinstance(setting, int): if self._value == 'on':", "rotation \"\"\" if self._mapRequested: self.confirmMap() else: self.console.rotateMap() def 
cmd_veto(self, data,", "in progress. Change friendlyfire mode to ^2$s^7?\", 'killcam': \"Killcam vote", "'scr_' + gt + '_timelimit' self.console.setCvar(cparams, setting) def confirmRoundLength(self): setting", "######################### VOTE TIMING ############################## def voteTimer(self): t1 = threading.Timer((self._votetime -", "\"\"\"\\ Vote NO to the current vote \"\"\" # Check", "rotation \"\"\" if not self.aquireCmdLock2(cmd, client, 60, True): client.message('^7Do not", "_allplayers = [] # Amount of players in game _mapRequested", "data[1] self._vote = type self._value = value else: client.message('^1ERROR^7: Invalid", "if self._vote: self.console.say('^110 seconds until vote end!') t2 = threading.Timer(10,", "100: client.message('^1ABORT^7: Not enough players in game to vote.') self._vote", "% x) elif len(allowed) == 0: client.message('You are not allowed", "match[0] self._value = match[0] return True elif len(match) > 1:", "len(match) > 1: match = (', ').join(match) client.message('^1ABORTED!^7Multiple matches: %s'", "round length. 
Voted value is not integer') return if gt", "which map is being voted for _kickRequested = None #", "else: self.console.rotateMap() def cmd_veto(self, data, client, cmd=None): \"\"\"\\ Cancel a", "(entry, value) in self.config.items('votes'): try: value = int(value) self._aVotes[entry.lower()] =", "know that vote is registered client.message('^3Your vote has been entered')", "self.config.get('commands', cmd) sp = cmd.split('-') alias = None if len(sp)", "self.console.write('map %s' % self._aMaps[self._mapRequested]) self._mapRequested = None def confirmMaprestart(self): #", "= None self._value = None return # Check if enough", "either version 2 of the License, or # (at your", "will be canceled for not passing _aVotes = {} #", "vote in progress \"\"\" # Check if there is a", "in self.config.items('votes'): try: value = int(value) self._aVotes[entry.lower()] = value except", "ValueError: self.debug('ERROR: Could not set new round length. Voted value", "write to the Free Software # Foundation, Inc., 51 Franklin", "5), self.voteMessage) t1.start() def voteMessage(self): if self._vote: self.console.say('^110 seconds until", "Using default: %s' % self._votetime) # Load votemaps section if", "if self._vote: self.console.say('^3Vote failed!') self._vote = None self._value = None", "'_roundlength' self.console.setCvar(cparams, setting) def confirmRoundLimit(self): setting = self._value amodes =", "progress. Restart current map?\", 'friendlyfire': \"Friendlyfire vote in progress. Change", "= None return # Check if player has permission to", "vote data = data.split() if len(data) == 1 and data[0]", "is allowed if self._vote not in self._aVotes: client.message('Vote type not", "maps can be called into vote. 
# - Fixed issue", "# Check if type of vote is allowed if self._vote", "self._vote = type self._value = value else: client.message('^1ERROR^7: Invalid usage.", "playersInGame = 0 self._allplayers = [] for c in self.console.clients.getList():", "= value except ValueError: self._aVotes[entry.lower()] = adLvl[value] self.debug('Allowed votes are:", "+ '_roundlength' self.console.setCvar(cparams, setting) def confirmRoundLimit(self): setting = self._value amodes", "Stores which vote is currently in progress _value = None", "30 # Time before a vote will be canceled for", "if self._vote: client.message('^1ERROR^7: Vote already in progress') return # Check", "################ def onGameEnd(self, event): \"\"\" Handle EVT_GAME_ROUND_END \"\"\" if self._mapRequested:", "vote passed. Kicking %s' % s.name) s.kick('Voted against', '', None,", "or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU", "v1.0.2 - Added \"!vote maps\" to show what maps can", "progress if not self._vote: client.message('No vote in progress') return #", "not q: self.debug('Vote aborted: Cannot vote for maps. mapvote turned", "self._value)) self._vote = None self._value = None self._amt_no = []", "the person to kick') self._vote = None self._value = None", "of the GNU General Public License as published by #", "currently in progress _value = None # Stores the value", "'nextmap': \"Next map vote in progress. Change next map to", "self._mapRequested = match[0] self._value = match[0] return True elif len(match)", "round limit as gametype do not have rounds') def getGameType(self):", "None) self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes, None) # Register events self.registerEvent('EVT_GAME_EXIT',", "not set new scorelimit. 
Voted value is not integer') return", "can redistribute it and/or modify # it under the terms", "not isinstance(setting, int): try: setting = int(setting) except ValueError: self.debug('ERROR:", "False ############### NEXTMAP FUNCTIONING ################ def onGameEnd(self, event): \"\"\" Handle", "v1 or v2: cmd.sayLoudOrPM(client, 'Vote enabled maps: ^2%s' % (('^7,", "mapname: a.append(mapname) elif partial in consolename: a.append(mapname) return a def", "is %s' % gametype) return False def sendBroadcast(self): # This", "'votetime') except: self.debug('Unable to get [votetime] from settings. Using default:", "maplist, partial): a = [] for mapname, consolename in maplist.iteritems():", "return # Check if enough players in game to vote", "elif a != 'maprestart' and a != 'maprotate': param =", "progress') return # Check if we have enough data for", "return func return None ######################### VOTE TIMING ############################## def voteTimer(self):", "being voted for is not valid. Toggling to next mode.", "not set new round limit. Voted value is not integer')", "vote in progress: Change map to ^3$s^7?\", 'nextmap': \"Next map", "map list') def cmd_maprotate(self, data, client, cmd=None): \"\"\"\\ Cycle to", "the Free Software Foundation; either version 2 of the License,", "# This wil broadcast vote message to server. 
a =", "majority of players voted vNo = len(self._amt_no) vPass = len(self._allplayers)", "self._value) if not q: self.debug('Vote aborted: Cannot vote for maps.", "== 'killcam': self.confirmKillCam() elif self._vote == 'scorelimit': self.confirmScoreLimit() elif self._vote", "client, wantedMap): # Find if map is in allowed list", "'scr_' + gt + '_scorelimit' self.console.setCvar(cparams, setting) def confirmTimeLimit(self): setting", "the License, or # (at your option) any later version.", "= None return if self._vote == 'kick': self._kickRequested = self.adminPlugin.findClientPrompt(self._value,", "(www.bigbrotherbot.net) # Copyright (C) 2015 ph03n1x # # This program", "self.debug('Setting being voted for is not valid. Toggling to next", "for Friendlyfire. Toggling to next mode') now = self.console.getCvar('scr_team_fftype').getInt() if", "self._allplayers = [] for c in self.console.clients.getList(): if c.team !=", "PARTICULAR PURPOSE. See the # GNU General Public License for", "needed. 
self.console.write('map %s' % self._aMaps[self._mapRequested]) self._mapRequested = None def confirmMaprestart(self):", "self.adminPlugin = self.console.getPlugin('admin') if not self.adminPlugin: self.error('Could not find admin", "have rounds') def getGameType(self): gametype = self.console.getCvar('g_gametype').getString() if gametype: return", "list the next map in rotation \"\"\" if not self.aquireCmdLock2(cmd,", "admin plugin') return # Register commands if 'commands' in self.config.sections():", "# Check if there is a vote in progress if", "amount of players in game _amt_no = [] _allplayers =", "1 and data[0] == 'maprestart' or len(data) == 1 and", "terms of the GNU General Public License as published by", "in progress') return # Check if player is allowed to", "if playersInGame <= 1 and client.maxLevel < 100: client.message('^1ABORT^7: Not", "+ '_timelimit' self.console.setCvar(cparams, setting) def confirmRoundLength(self): setting = self._value amodes", "a != 'maprotate': param = {'s': a} self.console.say(self.getMessage(self._vote, param)) self.console.say(self.getMessage('tovote'))", "current map self.console.write('fast_restart') def confirmFriendlyFire(self): # This will toggle friendly", "players voted vNo = len(self._amt_no) vPass = len(self._allplayers) / 2", "to server self.sendBroadcast() # Start timer self.voteTimer() # Set person", "# Time before a vote will be canceled for not", "Check if player has permission to call vote type v", "= int(setting) except ValueError: self.debug('ERROR: Could not set new scorelimit.", "if majority of players voted already vYes = len(self._amt_yes) vPass", "client not in self._amt_no: self._amt_no.insert(0, client) # Let player know", "= cmd.split('-') alias = None if len(sp) == 2: cmd,", "len(match) == 0: client.message('^1ABORTED!^7No maps matching your request') return False", "if not self.adminPlugin: self.error('Could not find admin plugin') return #", "vote end!') t2 = threading.Timer(10, self.denyVote) 
t2.start() ######################### MAP HANDLING", "available votes.') self._vote = None return # Check if player", "map is in allowed list match = self._search(self._aMaps, wantedMap) if", "registered client.message('^3Your vote has been entered') # Check if majority", "def cmd_allvotes(self, data, client, cmd=None): \"\"\"\\ Show all the votes", "self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting being voted for is not valid. Toggling to", "2: type = data[0] value = data[1] self._vote = type", "Check if the player already voted. If not, register vote", "length. Voted value is not integer') return if gt in", "###################### def confirmVote(self): self.console.say('^3Vote passed!^7') if self._vote == 'map': self.confirmMap()", "or client not in self._amt_no: self._amt_no.insert(0, client) # Let player", "for proper processing if self._vote == 'map' or self._vote ==", "len(allowed) > 0: p = sorted(allowed) x = (', ').join(p)", "not passing _aVotes = {} # All votes allowed. Imported", "def onLoadConfig(self): # Load settings section try: self._votetime = self.config.getint('settings',", "return True elif len(match) > 1: match = (', ').join(match)", "= [] self._allplayers = [] def confirmKick(self): # Note -", "votes allowed. Imported from \"votes\" section in config _aMaps =", "== 'kick': self.confirmKick() elif self._vote == 'maprotate': if self._mapRequested: self.confirmMap()", "== 0: setting = 1 else: setting = 0 self.console.setCvar('scr_game_allowkillcam',", "self._kickRequested = None return self._value = self._kickRequested.name else: self.debug('could not", "of players voted already vYes = len(self._amt_yes) vPass = len(self._allplayers)", "= self._kickRequested self.debug('Kick vote passed. Kicking %s' % s.name) s.kick('Voted", "Could not set new timelimit. 
Voted value is not integer')", "/ 2 if vYes > vPass: self.confirmVote() def cmd_no(self, data,", "currently: %s' % now) if now == 0: setting =", "== 1: self._mapRequested = match[0] self._value = match[0] return True", "in self.config.items('votemaps'): if mapname: self._aMaps[mapname] = consolename self.debug('Successfully entered maps", "if majority of players voted vNo = len(self._amt_no) vPass =", "except ValueError: self.debug('Could not set new round limit. Voted value", "self._vote == 'maprestart': self.confirmMaprestart() elif self._vote == 'friendlyfire': self.confirmFriendlyFire() elif", "for consideration of this plugin self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap, 'nm')", "= (', ').join(p) client.message('Allowed votes are: %s' % x) elif", "self.config.options('commands'): level = self.config.get('commands', cmd) sp = cmd.split('-') alias =", "Change friendlyfire mode to ^2$s^7?\", 'killcam': \"Killcam vote in progress.", "can be called into vote. # - Fixed issue where", "passed!^7') if self._vote == 'map': self.confirmMap() elif self._vote == 'nextmap':", "\"\"\"\\ - list the next map in rotation \"\"\" if", "Change next map to ^3$s^7?\", 'kick': \"Kick vote in progress:", "and off setting = self._value if not isinstance(setting, int): if", "2 of the License, or # (at your option) any", "for vote data = data.split() if len(data) == 1 and", "in self._amt_no: client.message('Are you drunk? You already voted!') return elif", "confirmMaprestart(self): # This will restart the current map self.console.write('fast_restart') def", "voted!') return elif client not in self._amt_yes or client not", "vote in progress. Change round length to ^2$s^7?\", 'roundlimit': \"Round", "self._mapRequested = None ############### CONFIRM VOTES ###################### def confirmVote(self): self.console.say('^3Vote", "'friendlyfire': \"Friendlyfire vote in progress. 
Change friendlyfire mode to ^2$s^7?\",", "= None def confirmMap(self): # This will cycle to next", "%s' % match) return False elif len(match) == 0: client.message('^1ABORTED!^7No", "in progress if requirements for vote unmet. # v1.0.2 -", "= self.checkIfAllowed(client, 'nextmap') if v1 or v2: cmd.sayLoudOrPM(client, 'Vote enabled", "USA # # Changelog: # v1.0.1 - Fixed vote remaining", "of players who voted yes. Checked against amount of players", "to the vote in progress \"\"\" # Check if there", "CodvotePlugin(b3.plugin.Plugin): adminPlugin = None _vote = None # Stores which", "############### CONFIRM VOTES ###################### def confirmVote(self): self.console.say('^3Vote passed!^7') if self._vote", "return True else: return False def checkIfAllowed(self, client, voteType): if", "Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301", "type self._value = value else: client.message('^1ERROR^7: Invalid usage. Type ^2!help", "to vote and store present players. Only players present at", "entered maps for voting: %s' % self._aMaps) # Load votes", "elif now == 0: setting = 1 self.console.setCvar('scr_team_fftype', int(setting)) def", "spam commands') return if self._mapRequested: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' %", "self.console.getCvar('g_gametype').getString() if gametype: return gametype else: self.debug('Error getting gametype. 
Response", "is free software; you can redistribute it and/or modify #", "vote to kick admin!') self._vote = None self._value = None", "setting = int(setting) except ValueError: now = self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting being", "# Register commands if 'commands' in self.config.sections(): for cmd in", "someone we need: client.kick(reason, keyword, admin, silent=True/False, data) s =", "elif self._vote == 'timelimit': self.confirmTimeLimit() elif self._vote == 'roundlength': self.confirmRoundLength()", "if vote already in progress if self._vote: client.message('^1ERROR^7: Vote already", "^2$s^7?\", 'scorelimit': \"Scorelimit vote in progress. Change score limit to", "call can vote playersInGame = 0 self._allplayers = [] for", "\"\"\" if self._mapRequested: self.confirmMap() else: self.console.rotateMap() def cmd_veto(self, data, client,", "[] self._allplayers = [] def confirmKick(self): # Note - to", "voteTimer(self): t1 = threading.Timer((self._votetime - 5), self.voteMessage) t1.start() def voteMessage(self):", "# but WITHOUT ANY WARRANTY; without even the implied warranty", "data[0] value = data[1] self._vote = type self._value = value", "int): try: setting = int(setting) except ValueError: self.debug('Could not set", "'mod': 20, 'admin': 40, 'fulladmin': 60, 'senioradmin': 80, 'superadmin': 100}", "\"Round limit vote in progress. 
Change round limit to ^2$s^7?\",", "This program is distributed in the hope that it will", "sp func = self.getCmd(cmd) if func: self.adminPlugin.registerCommand(self, cmd, level, func,", "to ^2$s^7?\", } def onStartup(self): self.adminPlugin = self.console.getPlugin('admin') if not", "= self.adminPlugin.findClientPrompt(self._value, client) if self._kickRequested: if self._kickRequested.maxLevel >= 20: client.message('^1ABORTED^7:", "1 and client.maxLevel < 100: client.message('^1ABORT^7: Not enough players in", "> (len(self._allplayers) / 2): self.confirmVote() def cmd_allvotes(self, data, client, cmd=None):", "None return # Check if enough players in game to", "to call any votes') def cmd_yes(self, data, client, cmd=None): \"\"\"\\", "if we have enough data for vote data = data.split()", "[] for k in self._aVotes.keys(): if client.maxLevel >= self._aVotes[k]: allowed.insert(0,", "not have rounds') def getGameType(self): gametype = self.console.getCvar('g_gametype').getString() if gametype:", "'on': setting = 1 elif self._value == 'off': setting =", "not self._vote: client.message('No vote in progress') return # Check if", "= None ############### CONFIRM VOTES ###################### def confirmVote(self): self.console.say('^3Vote passed!^7')", "next map in rotation \"\"\" if self._mapRequested: self.confirmMap() else: self.console.rotateMap()", "settings section try: self._votetime = self.config.getint('settings', 'votetime') except: self.debug('Unable to", "for vote unmet. # v1.0.2 - Added \"!vote maps\" to", "[] def denyVote(self): if self._vote: self.console.say('^3Vote failed!') self._vote = None", "self._aVotes[k]: allowed.insert(0, k) if len(allowed) > 0: p = sorted(allowed)", "return False ############### NEXTMAP FUNCTIONING ################ def onGameEnd(self, event): \"\"\"", "elif client not in self._amt_yes or client not in self._amt_no:", "def _search(self, maplist, partial): a = [] for mapname, consolename", "or cvar on server. 
\"\"\" # Check if vote already", "of the License, or # (at your option) any later", "setting = int(setting) except ValueError: self.debug('Could not set new round", "commands if 'commands' in self.config.sections(): for cmd in self.config.options('commands'): level", "mode') now = self.console.getCvar('scr_team_fftype').getInt() if now >= 1: setting =", "Changed to automatic yes vote. __version__ = '1.0.2' __author__ =", "Start timer self.voteTimer() # Set person who called vote as", "call this vote') self._vote = None return # Get further", "vote. __version__ = '1.0.2' __author__ = 'ph03n1x' import b3, threading", "has permission to call vote type v = self.checkIfAllowed(client, self._vote)", "= self.config.getint('settings', 'votetime') except: self.debug('Unable to get [votetime] from settings.", "None return # Seems like vote is ok. Broadcast to", "player already voted. If not, register vote if client in", "Broadcast to server self.sendBroadcast() # Start timer self.voteTimer() # Set", "Check if the player already voted if client in self._amt_yes", "not isinstance(setting, int): try: setting = int(setting) except ValueError: now", "General Public License as published by # the Free Software", "Map: ^2%s' % self._mapRequested.title()) return mapname = self.console.getNextMap() if mapname:", "self.console.clients.getList(): if c.team != b3.TEAM_SPEC: playersInGame += 1 self._allplayers.insert(0, c)", "commit. Vote: %s, Value: %s' % (self._vote, self._value)) self._vote =", "will be useful, # but WITHOUT ANY WARRANTY; without even", "value is not integer') return if gt in amodes: cparams", "if not, write to the Free Software # Foundation, Inc.,", "Changelog: # v1.0.1 - Fixed vote remaining in progress if", "= [] # Amount of players who voted yes. 
Checked", "limit as gametype do not have rounds') def getGameType(self): gametype", "self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client) if self._kickRequested: if self._kickRequested.maxLevel >= 20:", "to kick') self._vote = None self._value = None self._kickRequested =", "self._vote: self.console.say('^3Vote failed!') self._vote = None self._value = None self._amt_no", "this vote') self._vote = None return # Get further info", "voted for _kickRequested = None # Stores which player will", "match[0] return True elif len(match) > 1: match = (',", "maps\" to show what maps can be called into vote.", "vote in progress. Change time limit to ^2$s^7?\", 'roundlength': \"Round", "self._search(self._aMaps, wantedMap) if len(match) == 1: self._mapRequested = match[0] self._value", "a vote will be canceled for not passing _aVotes =", "- 5), self.voteMessage) t1.start() def voteMessage(self): if self._vote: self.console.say('^110 seconds", "FOR A PARTICULAR PURPOSE. See the # GNU General Public", "to ^2$s^7?\", 'timelimit': \"Timelimit vote in progress. Change time limit", "if self._value == 'on': setting = 1 elif self._value ==", "to commit. Vote: %s, Value: %s' % (self._vote, self._value)) self._vote", "[] for mapname, consolename in maplist.iteritems(): if partial in mapname:", "'') self._kickRequested = None def confirmMap(self): # This will cycle", "self._mapRequested: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % self._mapRequested.title()) return mapname =", "the # GNU General Public License for more details. #", "if not v: client.message('You do not have permission to call", "to ^2$s^7?\", 'roundlength': \"Round length vote in progress. 
Change round", "# Load votemaps section if self.config.has_section('votemaps'): for (mapname, consolename) in", "gt in amodes: cparams = 'scr_' + gt + '_roundlength'", "or v2: cmd.sayLoudOrPM(client, 'Vote enabled maps: ^2%s' % (('^7, ^2').join(self._aMaps.keys())))", "the next map in rotation \"\"\" if not self.aquireCmdLock2(cmd, client,", "self._vote = None self._value = None return else: client.message('^2You do", "__author__ = 'ph03n1x' import b3, threading import b3.plugin import b3.events", "else: self.error('Unable to commit. Vote: %s, Value: %s' % (self._vote,", "(self._vote, self._value)) self._vote = None self._value = None self._amt_no =", "% self._aMaps[self._mapRequested]) self._mapRequested = None def confirmMaprestart(self): # This will", "try: self._votetime = self.config.getint('settings', 'votetime') except: self.debug('Unable to get [votetime]", "enabled maps: ^2%s' % (('^7, ^2').join(self._aMaps.keys()))) self._vote = None self._value", "if mapname: self._aMaps[mapname] = consolename self.debug('Successfully entered maps for voting:", "gt + '_scorelimit' self.console.setCvar(cparams, setting) def confirmTimeLimit(self): setting = self._value", "of players in game _mapRequested = None # Stores which", "in amodes: cparams = 'scr_' + gt + '_roundlength' self.console.setCvar(cparams,", "call vote type v = self.checkIfAllowed(client, self._vote) if not v:", "data, client, cmd=None): \"\"\"\\ Cancel a vote in progress \"\"\"", "elif self._vote == 'kick': self.confirmKick() elif self._vote == 'maprotate': if", "cannot enter current vote') return # Check if the player", "self._vote == 'killcam': self.confirmKillCam() elif self._vote == 'scorelimit': self.confirmScoreLimit() elif", "self.mapvote(client, self._value) if not q: self.debug('Vote aborted: Cannot vote for", "def cmd_maprotate(self, data, client, cmd=None): \"\"\"\\ Cycle to next map", "').join(match) client.message('^1ABORTED!^7Multiple matches: %s' % match) return False elif len(match)", 
"value = data[1] self._vote = type self._value = value else:", "to get [votetime] from settings. Using default: %s' % self._votetime)", "not valid. Toggling to next mode. Killcam currently: %s' %", "self.debug('ERROR: Could not set new scorelimit. Voted value is not", "= int(setting) except ValueError: self.debug('ERROR: Could not set new timelimit.", "!= 'maprotate': param = {'s': a} self.console.say(self.getMessage(self._vote, param)) self.console.say(self.getMessage('tovote')) def", "= data.split() if len(data) == 1 and data[0] == 'maprotate'", "} def onStartup(self): self.adminPlugin = self.console.getPlugin('admin') if not self.adminPlugin: self.error('Could", "vote.') self._vote = None return # Check if type of", "in progress. Go to next map?\", 'maprestart': \"Maprestart vote in", "not v: client.message('You do not have permission to call this", "if v1 or v2: cmd.sayLoudOrPM(client, 'Vote enabled maps: ^2%s' %", "game _amt_no = [] _allplayers = [] # Amount of", "'scr_' + gt + '_roundlimit' self.console.setCvar(cparams, setting) else: self.debug('Could not", "0: p = sorted(allowed) x = (', ').join(p) client.message('Allowed votes", "amodes: cparams = 'scr_' + gt + '_roundlength' self.console.setCvar(cparams, setting)", "a = [] for mapname, consolename in maplist.iteritems(): if partial", "client.maxLevel < 100: client.message('^1ABORT^7: Not enough players in game to", "return # Seems like vote is ok. Broadcast to server", "new timelimit. Voted value is not integer') return cparams =", "client not in self._amt_yes or client not in self._amt_no: self._amt_yes.insert(0,", "map vote in progress. 
Go to next map?\", 'maprestart': \"Maprestart", "a == 'maprotate': self.console.say(self.getMessage(self._vote)) elif a != 'maprestart' and a", "return if self._vote == 'kick': self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client) if", "if vote passed _default_messages = { 'tovote': '^7Use ^2!yes ^7or", "else: setting = 0 self.console.setCvar('scr_game_allowkillcam', int(setting)) def confirmScoreLimit(self): # CVAR", "False elif len(match) == 0: client.message('^1ABORTED!^7No maps matching your request')", "gt + '_roundlimit' self.console.setCvar(cparams, setting) else: self.debug('Could not set round", "sendBroadcast(self): # This wil broadcast vote message to server. a", "aquireCmdLock2(self, cmd, client, delay, all=True): if client.maxLevel >= 20: return", "to vote.') self._vote = None return # Check if type", "None self._value = None self._kickRequested = None return # Seems", "client.maxLevel >= 20: return True elif cmd.time + 5 <=", "map in rotation \"\"\" if self._mapRequested: self.confirmMap() else: self.console.rotateMap() def", "copy of the GNU General Public License # along with", "a def mapvote(self, client, wantedMap): # Find if map is", "^2$s^7?\", 'timelimit': \"Timelimit vote in progress. 
Change time limit to", "confirmFriendlyFire(self): # This will toggle friendly fire on and off", "integer') return cparams = 'scr_' + gt + '_timelimit' self.console.setCvar(cparams,", "your request') return False ############### NEXTMAP FUNCTIONING ################ def onGameEnd(self,", "setting = 0 self.console.setCvar('scr_game_allowkillcam', int(setting)) def confirmScoreLimit(self): # CVAR to", "is currently in progress _value = None # Stores the", "maps: ^2%s' % (('^7, ^2').join(self._aMaps.keys()))) self._vote = None self._value =", "def confirmMap(self): # This will cycle to next map when", "to next map in rotation \"\"\" if self._mapRequested: self.confirmMap() else:", "2 if vYes > vPass: self.confirmVote() def cmd_no(self, data, client=None,", "call \"\"\" allowed = [] for k in self._aVotes.keys(): if", "setting = 0 else: self.debug('Unknown wanted setting for Friendlyfire. Toggling", "will cycle to next map when needed. self.console.write('map %s' %", "'^7Next Map: ^2%s' % self._mapRequested.title()) return mapname = self.console.getNextMap() if", "allowed. Use ^2!allvotes ^7for available votes.') self._vote = None return", "self.config.has_section('votemaps'): for (mapname, consolename) in self.config.items('votemaps'): if mapname: self._aMaps[mapname] =", "cmd_no(self, data, client=None, cmd=None): \"\"\"\\ Vote NO to the current", "# All votes allowed. Imported from \"votes\" section in config", "to next map when needed. self.console.write('map %s' % self._aMaps[self._mapRequested]) self._mapRequested", "self.checkIfAllowed(client, self._vote) if not v: client.message('You do not have permission", "= 1 else: setting = 0 self.console.setCvar('scr_game_allowkillcam', int(setting)) def confirmScoreLimit(self):", "client not in self._amt_yes or client not in self._amt_no: self._amt_no.insert(0,", "already voted. 
If not, register vote if client in self._amt_yes", "useful, # but WITHOUT ANY WARRANTY; without even the implied", "setting) def confirmRoundLength(self): setting = self._value amodes = ['ctf', 'sd',", "All vote allowed maps. Imported from \"votemaps\" section in config", "self.confirmMap() elif self._vote == 'nextmap': self.debug('nextmap vote passed. Params already", "can vote playersInGame = 0 self._allplayers = [] for c", "later version. # # This program is distributed in the", "int(setting) except ValueError: self.debug('ERROR: Could not set new scorelimit. Voted", "# You should have received a copy of the GNU", "have enough data for vote data = data.split() if len(data)", "self.console.setCvar('scr_team_fftype', int(setting)) def confirmKillCam(self): # rcon for killcam: scr_game_allowkillcam -", "- Fixed vote remaining in progress if requirements for vote", "valid. Toggling to next mode. Killcam currently: %s' % now)", "end!') t2 = threading.Timer(10, self.denyVote) t2.start() ######################### MAP HANDLING ##############################", "not self.adminPlugin: self.error('Could not find admin plugin') return # Register", "self.confirmKillCam() elif self._vote == 'scorelimit': self.confirmScoreLimit() elif self._vote == 'timelimit':", "self.checkIfAllowed(client, 'nextmap') if v1 or v2: cmd.sayLoudOrPM(client, 'Vote enabled maps:", "\"Maprestart vote in progress. 
Restart current map?\", 'friendlyfire': \"Friendlyfire vote", "where person who called vote needed to vote as well.", "If not, register vote if client in self._amt_yes or client", "if type of vote is allowed if self._vote not in", "if not self.aquireCmdLock2(cmd, client, 60, True): client.message('^7Do not spam commands')", "the value of the vote _votetime = 30 # Time", "self.confirmMap() else: self.console.rotateMap() def cmd_veto(self, data, client, cmd=None): \"\"\"\\ Cancel", "client) if len(self._amt_yes) > (len(self._allplayers) / 2): self.confirmVote() def cmd_allvotes(self,", "Time before a vote will be canceled for not passing", "map when needed. self.console.write('map %s' % self._aMaps[self._mapRequested]) self._mapRequested = None", "self._vote == 'timelimit': self.confirmTimeLimit() elif self._vote == 'roundlength': self.confirmRoundLength() elif", "mapname) else: client.message('^1Error:^7 could not get map list') def cmd_maprotate(self,", "setting = self._value gt = self.getGameType() if not isinstance(setting, int):", "'friendlyfire': self.confirmFriendlyFire() elif self._vote == 'killcam': self.confirmKillCam() elif self._vote ==", "== 'roundlength': self.confirmRoundLength() elif self._vote == 'roundlimit': self.confirmRoundLimit() else: self.error('Unable", "into vote. # - Fixed issue where person who called", "now == 0: setting = 1 else: setting = 0", "setting) def confirmTimeLimit(self): setting = self._value gt = self.getGameType() if", "store present players. Only players present at vote call can", "self._value == 'on': setting = 1 elif self._value == 'off':", "is being voted for _kickRequested = None # Stores which", "map vote in progress. 
Change next map to ^3$s^7?\", 'kick':", "proper processing if self._vote == 'map' or self._vote == 'nextmap':", "client.message('You are not allowed to call any votes') def cmd_yes(self,", "self._vote == 'kick': self.confirmKick() elif self._vote == 'maprotate': if self._mapRequested:", "all the votes you are allowed to call \"\"\" allowed", "self._mapRequested: self.confirmMap() else: self.console.rotateMap() elif self._vote == 'maprestart': self.confirmMaprestart() elif", "len(data) == 1 and data[0] == 'maprestart' or len(data) ==", "next map?\", 'maprestart': \"Maprestart vote in progress. Restart current map?\",", "will toggle friendly fire on and off setting = self._value", "self._vote = None self._value = None return # Check if", "cmd=None): \"\"\"\\ Cancel a vote in progress \"\"\" if self._vote:", "what maps can be called into vote. # - Fixed", "events self.registerEvent('EVT_GAME_EXIT', self.onGameEnd) def onLoadConfig(self): # Load settings section try:", "St, Fifth Floor, Boston, MA 02110-1301 USA # # Changelog:", "section in config _aMaps = {} # All vote allowed", "vote in progress: Kick ^2$s^7?\", 'maprotate': \"Rotate map vote in", "votes') self._vote = None self._value = None return # Check", "'scr_' + gt + '_roundlength' self.console.setCvar(cparams, setting) def confirmRoundLimit(self): setting", "this plugin self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap, 'nm') self.adminPlugin.registerCommand(self, 'maprotate', 20,", "cmd=None): \"\"\"\\ !vote <setting> <value> - vote to change setting", "yes vote self._amt_yes.insert(0, client) if len(self._amt_yes) > (len(self._allplayers) / 2):", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "round limit. 
Voted value is not integer') return if gt", "allowed list match = self._search(self._aMaps, wantedMap) if len(match) == 1:", "# Check if majority of players voted already vYes =", "'nextmap') if v1 or v2: cmd.sayLoudOrPM(client, 'Vote enabled maps: ^2%s'", "client, cmd=None): \"\"\"\\ Cycle to next map in rotation \"\"\"", "= [] def denyVote(self): if self._vote: self.console.say('^3Vote failed!') self._vote =", "(', ').join(match) client.message('^1ABORTED!^7Multiple matches: %s' % match) return False elif", "# Stores which map is being voted for _kickRequested =", "in progress if not self._vote: client.message('No vote in progress') return", "this program; if not, write to the Free Software #", "the player already voted. If not, register vote if client", "Checked against amount of players in game _amt_no = []", "stored') elif self._vote == 'kick': self.confirmKick() elif self._vote == 'maprotate':", "= None _vote = None # Stores which vote is", "\"\"\"\\ !vote <setting> <value> - vote to change setting or", "partial in mapname: a.append(mapname) elif partial in consolename: a.append(mapname) return", "1 elif self._value == 'off': setting = 0 else: self.debug('Unknown", "{ 'tovote': '^7Use ^2!yes ^7or ^2!no ^7 to vote', 'map':", "self.debug('ERROR: Could not set new round length. Voted value is", "in progress. Change next map to ^3$s^7?\", 'kick': \"Kick vote", "= None return # Get further info for proper processing", "if not q: self.debug('Vote aborted: Cannot vote for maps. mapvote", "############################## def _search(self, maplist, partial): a = [] for mapname,", "setting = 1 self.console.setCvar('scr_team_fftype', int(setting)) def confirmKillCam(self): # rcon for", "This will restart the current map self.console.write('fast_restart') def confirmFriendlyFire(self): #", "playersInGame <= 1 and client.maxLevel < 100: client.message('^1ABORT^7: Not enough", "voted for is not valid. Toggling to next mode. 
Killcam", "^7for info') return # Check if player is asking what", "except ValueError: self.debug('ERROR: Could not set new timelimit. Voted value", "have permission to call this vote') self._vote = None return", "and data[0] == 'maps': self._vote = data[0] self._value = data[0]", "[] self._amt_yes = [] self._allplayers = [] def confirmKick(self): #", "friendlyfire mode to ^2$s^7?\", 'killcam': \"Killcam vote in progress. Turn", "and store present players. Only players present at vote call", "a copy of the GNU General Public License # along", "votes you are allowed to call \"\"\" allowed = []", "server. \"\"\" # Check if vote already in progress if", "Go to next map?\", 'maprestart': \"Maprestart vote in progress. Restart", "we need: client.kick(reason, keyword, admin, silent=True/False, data) s = self._kickRequested", "20, 'admin': 40, 'fulladmin': 60, 'senioradmin': 80, 'superadmin': 100} for", "Could not set new round length. Voted value is not", "consolename: a.append(mapname) return a def mapvote(self, client, wantedMap): # Find", "\"Killcam vote in progress. Turn killcam ^2$s^7?\", 'scorelimit': \"Scorelimit vote", "None _vote = None # Stores which vote is currently", "match = self._search(self._aMaps, wantedMap) if len(match) == 1: self._mapRequested =", "v2: cmd.sayLoudOrPM(client, 'Vote enabled maps: ^2%s' % (('^7, ^2').join(self._aMaps.keys()))) self._vote", "+ '_roundlimit' self.console.setCvar(cparams, setting) else: self.debug('Could not set round limit", "/ 2): self.confirmVote() def cmd_allvotes(self, data, client, cmd=None): \"\"\"\\ Show", "self._amt_no: self._amt_yes.insert(0, client) # Let player know that vote is", "True): client.message('^7Do not spam commands') return if self._mapRequested: cmd.sayLoudOrPM(client, '^7Next", "Public License # along with this program; if not, write", "s = self._kickRequested self.debug('Kick vote passed. 
Kicking %s' % s.name)", "if self._vote == 'kick': self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client) if self._kickRequested:", "self._aVotes[voteType]: return True else: return False ################################################################################# # COMMANDS #", "onStartup(self): self.adminPlugin = self.console.getPlugin('admin') if not self.adminPlugin: self.error('Could not find", "in self.config.options('commands'): level = self.config.get('commands', cmd) sp = cmd.split('-') alias", "60, 'senioradmin': 80, 'superadmin': 100} for (entry, value) in self.config.items('votes'):", "consolename) in self.config.items('votemaps'): if mapname: self._aMaps[mapname] = consolename self.debug('Successfully entered", "len(sp) == 2: cmd, alias = sp func = self.getCmd(cmd)", "request') return False ############### NEXTMAP FUNCTIONING ################ def onGameEnd(self, event):", "confirmMap(self): # This will cycle to next map when needed.", "self.config.items('votes'): try: value = int(value) self._aVotes[entry.lower()] = value except ValueError:", "cmd) sp = cmd.split('-') alias = None if len(sp) ==", "Public License as published by # the Free Software Foundation;", "vote \"\"\" # Check if there is a vote in", "the vote _votetime = 30 # Time before a vote", "% mapname) else: client.message('^1Error:^7 could not get map list') def", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or", "time limit to ^2$s^7?\", 'roundlength': \"Round length vote in progress.", "in amodes: cparams = 'scr_' + gt + '_roundlimit' self.console.setCvar(cparams,", "cparams = 'scr_' + gt + '_roundlimit' self.console.setCvar(cparams, setting) else:", "self.console.say(self.getMessage(self._vote)) elif a != 'maprestart' and a != 'maprotate': param", "= sorted(allowed) x = (', ').join(p) client.message('Allowed votes are: %s'", "is distributed in the hope that it will be useful,", "further info for proper processing if self._vote == 'map' or", 
"client, cmd=None): \"\"\"\\ Show all the votes you are allowed", "vote in progress. Change friendlyfire mode to ^2$s^7?\", 'killcam': \"Killcam", "mode to ^2$s^7?\", 'killcam': \"Killcam vote in progress. Turn killcam", "vote in progress. Change round limit to ^2$s^7?\", } def", "== 'friendlyfire': self.confirmFriendlyFire() elif self._vote == 'killcam': self.confirmKillCam() elif self._vote", "vote is currently in progress _value = None # Stores", "client.message('^1ERROR^7: Vote already in progress') return # Check if we", "= None # Stores which player will be kicked if", "self.console.say('^3Vote passed!^7') if self._vote == 'map': self.confirmMap() elif self._vote ==", "# This program is distributed in the hope that it", "elif self._vote == 'maprotate': if self._mapRequested: self.confirmMap() else: self.console.rotateMap() elif", "def cmd_no(self, data, client=None, cmd=None): \"\"\"\\ Vote NO to the", "q: self.debug('Vote aborted: Cannot vote for maps. mapvote turned out", "called vote as yes vote self._amt_yes.insert(0, client) if len(self._amt_yes) >", "Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston,", "mode. Killcam currently: %s' % now) if now == 0:", "players who voted yes. Checked against amount of players in", "WITHOUT ANY WARRANTY; without even the implied warranty of #", "passed. Params already stored') elif self._vote == 'kick': self.confirmKick() elif", "the current map self.console.write('fast_restart') def confirmFriendlyFire(self): # This will toggle", "51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #", "now = self.console.getCvar('scr_team_fftype').getInt() if now >= 1: setting = 0", "= int(setting) except ValueError: now = self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting being voted", "scorelimit. 
Voted value is not integer') return cparams = 'scr_'", "= int(setting) except ValueError: self.debug('ERROR: Could not set new round", "return if self._mapRequested: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % self._mapRequested.title()) return", "self._value = None self._kickRequested = None return # Seems like", "elif self._vote == 'nextmap': self.debug('nextmap vote passed. Params already stored')", "self.error('Unable to commit. Vote: %s, Value: %s' % (self._vote, self._value))", "self.error('Could not find admin plugin') return # Register commands if", "Stores which player will be kicked if vote passed _default_messages", "^2$s^7?\", 'roundlength': \"Round length vote in progress. Change round length", "or # (at your option) any later version. # #", "'', None, True, '') self._kickRequested = None def confirmMap(self): #", "\"\"\" if not self.aquireCmdLock2(cmd, client, 60, True): client.message('^7Do not spam", "Killcam currently: %s' % now) if now == 0: setting", "\"Rotate map vote in progress. Go to next map?\", 'maprestart':", "not find admin plugin') return # Register commands if 'commands'", "cmd_maprotate(self, data, client, cmd=None): \"\"\"\\ Cycle to next map in", "0, 'user': 1, 'reg': 2, 'mod': 20, 'admin': 40, 'fulladmin':", "= None def confirmMaprestart(self): # This will restart the current", "self.console.write('fast_restart') def confirmFriendlyFire(self): # This will toggle friendly fire on", "[] _allplayers = [] # Amount of players in game", "self._amt_no: self._amt_no.insert(0, client) # Let player know that vote is", "Amount of players who voted yes. 
Checked against amount of", "_vote = None # Stores which vote is currently in", "== 'timelimit': self.confirmTimeLimit() elif self._vote == 'roundlength': self.confirmRoundLength() elif self._vote", "Vote already in progress') return # Check if we have", "if self._mapRequested: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % self._mapRequested.title()) return mapname", "self.console.say('^110 seconds until vote end!') t2 = threading.Timer(10, self.denyVote) t2.start()", "_value = None # Stores the value of the vote", "c) if playersInGame <= 1 and client.maxLevel < 100: client.message('^1ABORT^7:", "'maps': self._vote = data[0] self._value = data[0] elif len(data) ==", "= [] for k in self._aVotes.keys(): if client.maxLevel >= self._aVotes[k]:", "'senioradmin': 80, 'superadmin': 100} for (entry, value) in self.config.items('votes'): try:", "= None self._amt_no = [] self._amt_yes = [] self._allplayers =", "return True else: return False ################################################################################# # COMMANDS # #################################################################################", "if func: self.adminPlugin.registerCommand(self, cmd, level, func, alias) # Re-deploy commands", "Seems like vote is ok. Broadcast to server self.sendBroadcast() #", "confirmKillCam(self): # rcon for killcam: scr_game_allowkillcam - 0 or 1", "cmd in self.config.options('commands'): level = self.config.get('commands', cmd) sp = cmd.split('-')", "import b3, threading import b3.plugin import b3.events class CodvotePlugin(b3.plugin.Plugin): adminPlugin", "enough players in game to vote.') self._vote = None return", "# # This program is free software; you can redistribute", "settings. Using default: %s' % self._votetime) # Load votemaps section", "next mode. 
Killcam currently: %s' % now) if now ==", "if len(sp) == 2: cmd, alias = sp func =", "already stored') elif self._vote == 'kick': self.confirmKick() elif self._vote ==", "True, '') self._kickRequested = None def confirmMap(self): # This will", "data, client, cmd=None): \"\"\"\\ Show all the votes you are", "q = self.mapvote(client, self._value) if not q: self.debug('Vote aborted: Cannot", "to next mode') now = self.console.getCvar('scr_team_fftype').getInt() if now >= 1:", "alias) # Re-deploy commands for consideration of this plugin self.adminPlugin.registerCommand(self,", "been entered') # Check if majority of players voted already", "All votes allowed. Imported from \"votes\" section in config _aMaps", "= data[1] self._vote = type self._value = value else: client.message('^1ERROR^7:", "section if self.config.has_section('votemaps'): for (mapname, consolename) in self.config.items('votemaps'): if mapname:", "wantedMap): # Find if map is in allowed list match", "p = sorted(allowed) x = (', ').join(p) client.message('Allowed votes are:", "self._value = data[0] elif len(data) == 2: type = data[0]", "option) any later version. # # This program is distributed", "vPass: self.denyVote() def cmd_nextmap(self, data, client=None, cmd=None): \"\"\"\\ - list", "self._value = None return # Check if enough players in", "# Stores which vote is currently in progress _value =", "cmd.time + 5 <= self.console.time(): return True else: return False", "'user': 1, 'reg': 2, 'mod': 20, 'admin': 40, 'fulladmin': 60,", "# Stores which player will be kicked if vote passed", "data[0] == 'maprotate' or len(data) == 1 and data[0] ==", "^2$s^7?\", 'maprotate': \"Rotate map vote in progress. 
Go to next", "confirmScoreLimit(self): # CVAR to write is scr_<gametype>_scorelimit <number> setting =", "else: client.message('^1Error:^7 could not get map list') def cmd_maprotate(self, data,", "+ gt + '_scorelimit' self.console.setCvar(cparams, setting) def confirmTimeLimit(self): setting =", "# # Changelog: # v1.0.1 - Fixed vote remaining in", "votes are: %s' % x) elif len(allowed) == 0: client.message('You", "as published by # the Free Software Foundation; either version", "self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap, 'nm') self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate, None)", "self._aMaps) # Load votes section if self.config.has_section('votes'): adLvl = {'guest':", "the votes you are allowed to call \"\"\" allowed =", "kick someone we need: client.kick(reason, keyword, admin, silent=True/False, data) s", "cmd=None): \"\"\"\\ Cycle to next map in rotation \"\"\" if", "threading.Timer((self._votetime - 5), self.voteMessage) t1.start() def voteMessage(self): if self._vote: self.console.say('^110", "02110-1301 USA # # Changelog: # v1.0.1 - Fixed vote", "self._allplayers = [] def denyVote(self): if self._vote: self.console.say('^3Vote failed!') self._vote", "func return None ######################### VOTE TIMING ############################## def voteTimer(self): t1", "if enough players in game to vote and store present", "if len(self._amt_yes) > (len(self._allplayers) / 2): self.confirmVote() def cmd_allvotes(self, data,", "(('^7, ^2').join(self._aMaps.keys()))) self._vote = None self._value = None return else:", "{} # All votes allowed. Imported from \"votes\" section in", "- to kick someone we need: client.kick(reason, keyword, admin, silent=True/False,", "License for more details. # # You should have received", "This will cycle to next map when needed. 
self.console.write('map %s'", "Load votes section if self.config.has_section('votes'): adLvl = {'guest': 0, 'user':", "EVT_GAME_ROUND_END \"\"\" if self._mapRequested: self.confirmMap() self._mapRequested = None ############### CONFIRM", "for voting: %s' % self._aMaps) # Load votes section if", "drunk? You already voted!') return elif client not in self._amt_yes", "return False elif len(match) == 0: client.message('^1ABORTED!^7No maps matching your", "cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % self._mapRequested.title()) return mapname = self.console.getNextMap()", "issue where person who called vote needed to vote as", "^2!allvotes ^7for available votes.') self._vote = None return # Check", "cmd_nextmap(self, data, client=None, cmd=None): \"\"\"\\ - list the next map", "the Free Software # Foundation, Inc., 51 Franklin St, Fifth", "in progress. Change time limit to ^2$s^7?\", 'roundlength': \"Round length", "voted. If not, register vote if client in self._amt_yes or", "= {} # All votes allowed. Imported from \"votes\" section", "to change setting or cvar on server. \"\"\" # Check", "plugin self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap, 'nm') self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate,", "# Stores the value of the vote _votetime = 30", "(mapname, consolename) in self.config.items('votemaps'): if mapname: self._aMaps[mapname] = consolename self.debug('Successfully", "cmd=None): \"\"\"\\ Show all the votes you are allowed to", ">= self._aVotes[voteType]: return True else: return False ################################################################################# # COMMANDS", "> 0: p = sorted(allowed) x = (', ').join(p) client.message('Allowed", "a == 'maprestart' or a == 'maprotate': self.console.say(self.getMessage(self._vote)) elif a", "in game _mapRequested = None # Stores which map is", "yes vote. 
__version__ = '1.0.2' __author__ = 'ph03n1x' import b3,", "program; if not, write to the Free Software # Foundation,", "vote in progress. Change score limit to ^2$s^7?\", 'timelimit': \"Timelimit", "self._aVotes) def getCmd(self, cmd): cmd = 'cmd_%s' % cmd if", "gt + '_roundlength' self.console.setCvar(cparams, setting) def confirmRoundLimit(self): setting = self._value", "details. # # You should have received a copy of", "vNo > vPass: self.denyVote() def cmd_nextmap(self, data, client=None, cmd=None): \"\"\"\\", "confirmTimeLimit(self): setting = self._value gt = self.getGameType() if not isinstance(setting,", "import b3.plugin import b3.events class CodvotePlugin(b3.plugin.Plugin): adminPlugin = None _vote", "This program is free software; you can redistribute it and/or", "== 'off': setting = 0 else: self.debug('Unknown wanted setting for", "gametype) return False def sendBroadcast(self): # This wil broadcast vote", "self.debug('nextmap vote passed. Params already stored') elif self._vote == 'kick':", "at vote call can vote playersInGame = 0 self._allplayers =", "not, register vote if client in self._amt_yes or client in", "aborted: Cannot vote for maps. mapvote turned out false') self._vote", "= self.getCmd(cmd) if func: self.adminPlugin.registerCommand(self, cmd, level, func, alias) #", "self.config.sections(): for cmd in self.config.options('commands'): level = self.config.get('commands', cmd) sp", "self.adminPlugin.registerCommand(self, cmd, level, func, alias) # Re-deploy commands for consideration", "'tovote': '^7Use ^2!yes ^7or ^2!no ^7 to vote', 'map': \"Map", "set new round length. 
Voted value is not integer') return", "None ############### CONFIRM VOTES ###################### def confirmVote(self): self.console.say('^3Vote passed!^7') if", "= 0 self._allplayers = [] for c in self.console.clients.getList(): if", "progress') return # Check if player is allowed to vote", "== 'maprotate': self.console.say(self.getMessage(self._vote)) elif a != 'maprestart' and a !=", "not allowed to call any votes') def cmd_yes(self, data, client,", "in progress: Kick ^2$s^7?\", 'maprotate': \"Rotate map vote in progress.", "self.confirmKick() elif self._vote == 'maprotate': if self._mapRequested: self.confirmMap() else: self.console.rotateMap()", "^2!yes ^7or ^2!no ^7 to vote', 'map': \"Map vote in", "isinstance(setting, int): try: setting = int(setting) except ValueError: self.debug('Could not", "elif len(data) == 2: type = data[0] value = data[1]", "received a copy of the GNU General Public License #", "being voted for _kickRequested = None # Stores which player", "Copyright (C) 2015 ph03n1x # # This program is free", "/ 2 if vNo > vPass: self.denyVote() def cmd_nextmap(self, data,", "= self._value gt = self.getGameType() if not isinstance(setting, int): try:", "could not get map list') def cmd_maprotate(self, data, client, cmd=None):", "CoDVote plugin for BigBrotherBot(B3) (www.bigbrotherbot.net) # Copyright (C) 2015 ph03n1x", "self._vote == 'maps': v1 = self.checkIfAllowed(client, 'map') v2 = self.checkIfAllowed(client,", "Amount of players in game _mapRequested = None # Stores", "# (at your option) any later version. # # This", "See the # GNU General Public License for more details.", "0: setting = 1 else: setting = 0 self.console.setCvar('scr_game_allowkillcam', int(setting))", "Let player know that vote is registered client.message('^3Your vote has", "not set new timelimit. 
Voted value is not integer') return", "self._aMaps[mapname] = consolename self.debug('Successfully entered maps for voting: %s' %", "self.voteMessage) t1.start() def voteMessage(self): if self._vote: self.console.say('^110 seconds until vote", "return None ######################### VOTE TIMING ############################## def voteTimer(self): t1 =", "not in self._amt_yes or client not in self._amt_no: self._amt_no.insert(0, client)", "self._amt_no.insert(0, client) # Let player know that vote is registered", "cmd=None): \"\"\"\\ Vote NO to the current vote \"\"\" #", "that it will be useful, # but WITHOUT ANY WARRANTY;", "cmd, alias = sp func = self.getCmd(cmd) if func: self.adminPlugin.registerCommand(self,", "self.console.say('^3Vote failed!') self._vote = None self._value = None self._amt_no =", "in self._amt_yes or client not in self._amt_no: self._amt_yes.insert(0, client) #", "'superadmin': 100} for (entry, value) in self.config.items('votes'): try: value =", "vote. # - Fixed issue where person who called vote", "len(self._amt_yes) > (len(self._allplayers) / 2): self.confirmVote() def cmd_allvotes(self, data, client,", "maps. Imported from \"votemaps\" section in config _amt_yes = []", "1 elif self._value == 'off': setting = 0 if not", "votes are: %s' % self._aVotes) def getCmd(self, cmd): cmd =", "playersInGame += 1 self._allplayers.insert(0, c) if playersInGame <= 1 and", "if not isinstance(setting, int): if self._value == 'on': setting =", ">= 20: client.message('^1ABORTED^7: Cannot vote to kick admin!') self._vote =", "if self._vote not in self._aVotes: client.message('Vote type not allowed. Use", "Restart current map?\", 'friendlyfire': \"Friendlyfire vote in progress. Change friendlyfire", "in progress. Change round limit to ^2$s^7?\", } def onStartup(self):", "'map': self.confirmMap() elif self._vote == 'nextmap': self.debug('nextmap vote passed. 
Params", "FUNCTIONING ################ def onGameEnd(self, event): \"\"\" Handle EVT_GAME_ROUND_END \"\"\" if", "if self._mapRequested: self.confirmMap() else: self.console.rotateMap() def cmd_veto(self, data, client, cmd=None):", "'dom'] gt = self.getGameType() if not isinstance(setting, int): try: setting", "be called into vote. # - Fixed issue where person", "to call map votes') self._vote = None self._value = None", "< 100: client.message('^1ABORT^7: Not enough players in game to vote.')", "self.console.getPlugin('admin') if not self.adminPlugin: self.error('Could not find admin plugin') return", "= None return # Check if type of vote is", "'cmd_%s' % cmd if hasattr(self, cmd): func = getattr(self, cmd)", "'map' or self._vote == 'nextmap': q = self.mapvote(client, self._value) if", "to kick admin!') self._vote = None self._value = None self._kickRequested", "% (self._vote, self._value)) self._vote = None self._value = None self._amt_no", "# Amount of players who voted yes. Checked against amount", "isinstance(setting, int): try: setting = int(setting) except ValueError: self.debug('ERROR: Could", "voted if client in self._amt_yes or client in self._amt_no: client.message('Are", "client, voteType): if client.maxLevel >= self._aVotes[voteType]: return True else: return", "vote as yes vote self._amt_yes.insert(0, client) if len(self._amt_yes) > (len(self._allplayers)", "'^7Use ^2!yes ^7or ^2!no ^7 to vote', 'map': \"Map vote", "progress if self._vote: client.message('^1ERROR^7: Vote already in progress') return #", "against amount of players in game _amt_no = [] _allplayers", "= len(self._allplayers) / 2 if vYes > vPass: self.confirmVote() def", "# # This program is distributed in the hope that", "v2 = self.checkIfAllowed(client, 'nextmap') if v1 or v2: cmd.sayLoudOrPM(client, 'Vote", "in game to vote.') self._vote = None return # Check", "client.message('Sorry, you cannot enter current vote') return # Check if", "self.debug('Could not set round limit as gametype 
do not have", "\"Scorelimit vote in progress. Change score limit to ^2$s^7?\", 'timelimit':", "if len(data) == 1 and data[0] == 'maprotate' or len(data)", "0: setting = 1 self.console.setCvar('scr_team_fftype', int(setting)) def confirmKillCam(self): # rcon", "allowed to call \"\"\" allowed = [] for k in", "match = (', ').join(match) client.message('^1ABORTED!^7Multiple matches: %s' % match) return", "not self.aquireCmdLock2(cmd, client, 60, True): client.message('^7Do not spam commands') return", "in self._aVotes.keys(): if client.maxLevel >= self._aVotes[k]: allowed.insert(0, k) if len(allowed)", "person who called vote needed to vote as well. Changed", "and/or modify # it under the terms of the GNU", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "gametype do not have rounds') def getGameType(self): gametype = self.console.getCvar('g_gametype').getString()", "Check if majority of players voted already vYes = len(self._amt_yes)", "\"\"\" if self._mapRequested: self.confirmMap() self._mapRequested = None ############### CONFIRM VOTES", "Toggling to next mode. Killcam currently: %s' % now) if", "1 else: setting = 0 self.console.setCvar('scr_game_allowkillcam', int(setting)) def confirmScoreLimit(self): #", "it and/or modify # it under the terms of the", "^7 to vote', 'map': \"Map vote in progress: Change map", "to the Free Software # Foundation, Inc., 51 Franklin St,", "now == 0: setting = 1 self.console.setCvar('scr_team_fftype', int(setting)) def confirmKillCam(self):", "Boston, MA 02110-1301 USA # # Changelog: # v1.0.1 -", "in progress') return # Check if we have enough data", "% gametype) return False def sendBroadcast(self): # This wil broadcast", "elif len(match) == 0: client.message('^1ABORTED!^7No maps matching your request') return", "timer self.voteTimer() # Set person who called vote as yes", "free software; you can redistribute it and/or modify # it", "requirements for vote unmet. 
# v1.0.2 - Added \"!vote maps\"", "self._vote == 'nextmap': self.debug('nextmap vote passed. Params already stored') elif", "0 self._allplayers = [] for c in self.console.clients.getList(): if c.team", "ph03n1x # # This program is free software; you can", "it under the terms of the GNU General Public License", "= self._kickRequested.name else: self.debug('could not get the person to kick')", "self._vote == 'friendlyfire': self.confirmFriendlyFire() elif self._vote == 'killcam': self.confirmKillCam() elif", "program is distributed in the hope that it will be", "which player will be kicked if vote passed _default_messages =", "except ValueError: self._aVotes[entry.lower()] = adLvl[value] self.debug('Allowed votes are: %s' %", "0 else: self.debug('Unknown wanted setting for Friendlyfire. Toggling to next", "if client.maxLevel >= self._aVotes[voteType]: return True else: return False #################################################################################", "get [votetime] from settings. Using default: %s' % self._votetime) #", "def sendBroadcast(self): # This wil broadcast vote message to server.", "self.confirmVote() def cmd_allvotes(self, data, client, cmd=None): \"\"\"\\ Show all the", "= (', ').join(match) client.message('^1ABORTED!^7Multiple matches: %s' % match) return False", "if a == 'maprestart' or a == 'maprotate': self.console.say(self.getMessage(self._vote)) elif", "set new timelimit. Voted value is not integer') return cparams", "!= b3.TEAM_SPEC: playersInGame += 1 self._allplayers.insert(0, c) if playersInGame <=", "^7or ^2!no ^7 to vote', 'map': \"Map vote in progress:", "game _mapRequested = None # Stores which map is being", "cmd_yes(self, data, client, cmd=None): \"\"\"\\ Vote yes to the vote", "FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General", "1, self.cmd_allvotes, None) # Register events self.registerEvent('EVT_GAME_EXIT', self.onGameEnd) def onLoadConfig(self):", "denyVote(self): if self._vote: self.console.say('^3Vote failed!') self._vote = None self._value =", "# Check if player is allowed to vote if client", "client) # Let player know that vote is registered client.message('^3Your", "set round limit as gametype do not have rounds') def", "consolename in maplist.iteritems(): if partial in mapname: a.append(mapname) elif partial", "None # Stores which player will be kicked if vote", "Only players present at vote call can vote playersInGame =", "client.message('You do not have permission to call this vote') self._vote", "self.cmd_nextmap, 'nm') self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate, None) self.adminPlugin.registerCommand(self, 'allvotes', 1,", "= 0 elif now == 0: setting = 1 self.console.setCvar('scr_team_fftype',", "to call vote type v = self.checkIfAllowed(client, self._vote) if not", "scr_game_allowkillcam - 0 or 1 setting = self._value if self._value", "v1.0.1 - Fixed vote remaining in progress if requirements for", "like vote is ok. Broadcast to server self.sendBroadcast() # Start", "wantedMap) if len(match) == 1: self._mapRequested = match[0] self._value =", "admin!') self._vote = None self._value = None self._kickRequested = None", "to call \"\"\" allowed = [] for k in self._aVotes.keys():", "= None return # Check if enough players in game", "{} # All vote allowed maps. Imported from \"votemaps\" section", "cmd, level, func, alias) # Re-deploy commands for consideration of", "passed _default_messages = { 'tovote': '^7Use ^2!yes ^7or ^2!no ^7", "^2$s^7?\", 'killcam': \"Killcam vote in progress. Turn killcam ^2$s^7?\", 'scorelimit':", "\"\"\" # Check if vote already in progress if self._vote:", "# Seems like vote is ok. 
Broadcast to server self.sendBroadcast()", "silent=True/False, data) s = self._kickRequested self.debug('Kick vote passed. Kicking %s'", "current map?\", 'friendlyfire': \"Friendlyfire vote in progress. Change friendlyfire mode", "Show all the votes you are allowed to call \"\"\"", "allowed maps. Imported from \"votemaps\" section in config _amt_yes =", "is not integer') return if gt in amodes: cparams =", "player is allowed to vote if client not in self._allplayers:", "\"\"\"\\ Cycle to next map in rotation \"\"\" if self._mapRequested:", "== 'maps': self._vote = data[0] self._value = data[0] elif len(data)", "next map to ^3$s^7?\", 'kick': \"Kick vote in progress: Kick", "new round limit. Voted value is not integer') return if", "for more details. # # You should have received a", "data = data.split() if len(data) == 1 and data[0] ==", "len(data) == 1 and data[0] == 'maprotate' or len(data) ==", "= sp func = self.getCmd(cmd) if func: self.adminPlugin.registerCommand(self, cmd, level,", "Kick ^2$s^7?\", 'maprotate': \"Rotate map vote in progress. Go to", "'map') v2 = self.checkIfAllowed(client, 'nextmap') if v1 or v2: cmd.sayLoudOrPM(client,", "self._amt_yes or client not in self._amt_no: self._amt_yes.insert(0, client) # Let", "func: self.adminPlugin.registerCommand(self, cmd, level, func, alias) # Re-deploy commands for", "self.aquireCmdLock2(cmd, client, 60, True): client.message('^7Do not spam commands') return if", "or client not in self._amt_no: self._amt_yes.insert(0, client) # Let player", "'kick': self.confirmKick() elif self._vote == 'maprotate': if self._mapRequested: self.confirmMap() else:", "= match[0] self._value = match[0] return True elif len(match) >", "allowed. Imported from \"votes\" section in config _aMaps = {}", "new round length. Voted value is not integer') return if", "your option) any later version. 
# # This program is", "# Register events self.registerEvent('EVT_GAME_EXIT', self.onGameEnd) def onLoadConfig(self): # Load settings", "return cparams = 'scr_' + gt + '_timelimit' self.console.setCvar(cparams, setting)", "self._vote == 'roundlength': self.confirmRoundLength() elif self._vote == 'roundlimit': self.confirmRoundLimit() else:", "be voted on if self._vote == 'maps': v1 = self.checkIfAllowed(client,", "along with this program; if not, write to the Free", "in progress. Change score limit to ^2$s^7?\", 'timelimit': \"Timelimit vote", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "# Changelog: # v1.0.1 - Fixed vote remaining in progress", "(at your option) any later version. # # This program", "'reg': 2, 'mod': 20, 'admin': 40, 'fulladmin': 60, 'senioradmin': 80,", "= None return self._value = self._kickRequested.name else: self.debug('could not get", "None self._value = None return else: client.message('^2You do not have", "config _amt_yes = [] # Amount of players who voted", "self._vote not in self._aVotes: client.message('Vote type not allowed. Use ^2!allvotes", "scr_<gametype>_scorelimit <number> setting = self._value gt = self.getGameType() if not", "not have permission to call map votes') self._vote = None", "isinstance(setting, int): try: setting = int(setting) except ValueError: now =", ">= 20: return True elif cmd.time + 5 <= self.console.time():", "None return # Check if player has permission to call", "self.denyVote) t2.start() ######################### MAP HANDLING ############################## def _search(self, maplist, partial):", "as yes vote self._amt_yes.insert(0, client) if len(self._amt_yes) > (len(self._allplayers) /", "# Check if enough players in game to vote and", "'maprestart': \"Maprestart vote in progress. 
Restart current map?\", 'friendlyfire': \"Friendlyfire", "# Check if player has permission to call vote type", "on and off setting = self._value if not isinstance(setting, int):", "else: self.debug('Unknown wanted setting for Friendlyfire. Toggling to next mode')", "to write is scr_<gametype>_scorelimit <number> setting = self._value gt =", "5 <= self.console.time(): return True else: return False def checkIfAllowed(self,", "\"!vote maps\" to show what maps can be called into", "# Check if the player already voted if client in", "in consolename: a.append(mapname) return a def mapvote(self, client, wantedMap): #", "1 and data[0] == 'maps': self._vote = data[0] self._value =", "player has permission to call vote type v = self.checkIfAllowed(client,", "return # Check if player has permission to call vote", "Check if player is asking what maps can be voted", "self.console.setCvar('scr_game_allowkillcam', int(setting)) def confirmScoreLimit(self): # CVAR to write is scr_<gametype>_scorelimit", "Check if majority of players voted vNo = len(self._amt_no) vPass", "Value: %s' % (self._vote, self._value)) self._vote = None self._value =", "on server. \"\"\" # Check if vote already in progress", "as well. Changed to automatic yes vote. __version__ = '1.0.2'", "client not in self._allplayers: client.message('Sorry, you cannot enter current vote')", "return gametype else: self.debug('Error getting gametype. Response is %s' %", "= self.getGameType() if not isinstance(setting, int): try: setting = int(setting)", "to next map?\", 'maprestart': \"Maprestart vote in progress. 
Restart current", "self._kickRequested.name else: self.debug('could not get the person to kick') self._vote", "client.message('Allowed votes are: %s' % x) elif len(allowed) == 0:", "processing if self._vote == 'map' or self._vote == 'nextmap': q", "func = getattr(self, cmd) return func return None ######################### VOTE", "%s' % self._aVotes) def getCmd(self, cmd): cmd = 'cmd_%s' %", "vYes > vPass: self.confirmVote() def cmd_no(self, data, client=None, cmd=None): \"\"\"\\", "= self._value amodes = ['ctf', 'sd', 're', 'bas', 'dom'] gt", "sorted(allowed) x = (', ').join(p) client.message('Allowed votes are: %s' %", "to next mode. Killcam currently: %s' % now) if now", "else: self.debug('Error getting gametype. Response is %s' % gametype) return", "vote type v = self.checkIfAllowed(client, self._vote) if not v: client.message('You", "Register events self.registerEvent('EVT_GAME_EXIT', self.onGameEnd) def onLoadConfig(self): # Load settings section", "progress: Kick ^2$s^7?\", 'maprotate': \"Rotate map vote in progress. Go", "setting = self._value if not isinstance(setting, int): if self._value ==", "int): try: setting = int(setting) except ValueError: now = self.console.getCvar('scr_game_allowkillcam').getInt()", "gametype else: self.debug('Error getting gametype. Response is %s' % gametype)", "self._allplayers = [] def confirmKick(self): # Note - to kick", "\"Next map vote in progress. Change next map to ^3$s^7?\",", "allowed if self._vote not in self._aVotes: client.message('Vote type not allowed.", "vote as well. Changed to automatic yes vote. 
__version__ =", "data[0] == 'maps': self._vote = data[0] self._value = data[0] elif", "None return if self._vote == 'kick': self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client)", "self._value gt = self.getGameType() if not isinstance(setting, int): try: setting", "client, 60, True): client.message('^7Do not spam commands') return if self._mapRequested:", "progress _value = None # Stores the value of the", "= adLvl[value] self.debug('Allowed votes are: %s' % self._aVotes) def getCmd(self,", "getting gametype. Response is %s' % gametype) return False def", "majority of players voted already vYes = len(self._amt_yes) vPass =", "self.console.time(): return True else: return False def checkIfAllowed(self, client, voteType):", "yes to the vote in progress \"\"\" # Check if", "elif self._vote == 'roundlimit': self.confirmRoundLimit() else: self.error('Unable to commit. Vote:", "0 if not isinstance(setting, int): try: setting = int(setting) except", "Cancel a vote in progress \"\"\" if self._vote: client.message('^3Vote canceled')", "in mapname: a.append(mapname) elif partial in consolename: a.append(mapname) return a", "== 'roundlimit': self.confirmRoundLimit() else: self.error('Unable to commit. Vote: %s, Value:", "= [] self._amt_yes = [] self._allplayers = [] def confirmKick(self):", "if player is asking what maps can be voted on", "None, True, '') self._kickRequested = None def confirmMap(self): # This", "should have received a copy of the GNU General Public", "setting = int(setting) except ValueError: self.debug('ERROR: Could not set new", "def cmd_vote(self, data, client, cmd=None): \"\"\"\\ !vote <setting> <value> -", "^7for available votes.') self._vote = None return # Check if", "_aVotes = {} # All votes allowed. Imported from \"votes\"", "vote is ok. 
Broadcast to server self.sendBroadcast() # Start timer", "if self._kickRequested: if self._kickRequested.maxLevel >= 20: client.message('^1ABORTED^7: Cannot vote to", "VOTE TIMING ############################## def voteTimer(self): t1 = threading.Timer((self._votetime - 5),", "progress. Go to next map?\", 'maprestart': \"Maprestart vote in progress.", "if there is a vote in progress if not self._vote:", "%s' % gametype) return False def sendBroadcast(self): # This wil", "not set new round length. Voted value is not integer')", "Re-deploy commands for consideration of this plugin self.adminPlugin.registerCommand(self, 'nextmap', 1,", "None return # Check if type of vote is allowed", "vote if client not in self._allplayers: client.message('Sorry, you cannot enter", "seconds until vote end!') t2 = threading.Timer(10, self.denyVote) t2.start() #########################", "sp = cmd.split('-') alias = None if len(sp) == 2:", "# Copyright (C) 2015 ph03n1x # # This program is", "cmd.sayLoudOrPM(client, 'Vote enabled maps: ^2%s' % (('^7, ^2').join(self._aMaps.keys()))) self._vote =", "for maps. mapvote turned out false') self._vote = None return", "# CVAR to write is scr_<gametype>_scorelimit <number> setting = self._value", "'_timelimit' self.console.setCvar(cparams, setting) def confirmRoundLength(self): setting = self._value amodes =", "if requirements for vote unmet. # v1.0.2 - Added \"!vote", "== 'nextmap': self.debug('nextmap vote passed. Params already stored') elif self._vote", "'roundlength': self.confirmRoundLength() elif self._vote == 'roundlimit': self.confirmRoundLimit() else: self.error('Unable to", "from \"votemaps\" section in config _amt_yes = [] # Amount", "vote in progress. Restart current map?\", 'friendlyfire': \"Friendlyfire vote in", "wanted setting for Friendlyfire. 
Toggling to next mode') now =", "is asking what maps can be voted on if self._vote", "client.kick(reason, keyword, admin, silent=True/False, data) s = self._kickRequested self.debug('Kick vote", "\"Kick vote in progress: Kick ^2$s^7?\", 'maprotate': \"Rotate map vote", "self._vote == 'roundlimit': self.confirmRoundLimit() else: self.error('Unable to commit. Vote: %s,", "_kickRequested = None # Stores which player will be kicked", "'maprotate': param = {'s': a} self.console.say(self.getMessage(self._vote, param)) self.console.say(self.getMessage('tovote')) def aquireCmdLock2(self,", "vote has been entered') # Check if majority of players", "int(setting)) def confirmScoreLimit(self): # CVAR to write is scr_<gametype>_scorelimit <number>", "Cannot vote to kick admin!') self._vote = None self._value =", "amodes = ['ctf', 'sd', 're', 'bas', 'dom'] gt = self.getGameType()", "self._amt_yes = [] self._allplayers = [] def denyVote(self): if self._vote:", "now) if now == 0: setting = 1 else: setting", "# v1.0.1 - Fixed vote remaining in progress if requirements", "Kicking %s' % s.name) s.kick('Voted against', '', None, True, '')", "already vYes = len(self._amt_yes) vPass = len(self._allplayers) / 2 if", "self._value == 'off': setting = 0 if not isinstance(setting, int):", "show what maps can be called into vote. # -", "^2%s' % (('^7, ^2').join(self._aMaps.keys()))) self._vote = None self._value = None", "return # Check if player is allowed to vote if", "next map in rotation \"\"\" if not self.aquireCmdLock2(cmd, client, 60,", "'roundlimit': self.confirmRoundLimit() else: self.error('Unable to commit. 
Vote: %s, Value: %s'", "self._kickRequested = None def confirmMap(self): # This will cycle to", "<value> - vote to change setting or cvar on server.", "+ gt + '_timelimit' self.console.setCvar(cparams, setting) def confirmRoundLength(self): setting =", "# Check if vote already in progress if self._vote: client.message('^1ERROR^7:", "if self._vote == 'map' or self._vote == 'nextmap': q =", "\"\"\" if self._vote: client.message('^3Vote canceled') self.denyVote() elif not self._vote: client.message('^3No", "class CodvotePlugin(b3.plugin.Plugin): adminPlugin = None _vote = None # Stores", "'allvotes', 1, self.cmd_allvotes, None) # Register events self.registerEvent('EVT_GAME_EXIT', self.onGameEnd) def", "threading import b3.plugin import b3.events class CodvotePlugin(b3.plugin.Plugin): adminPlugin = None", "Public License for more details. # # You should have", "and client.maxLevel < 100: client.message('^1ABORT^7: Not enough players in game", "who called vote as yes vote self._amt_yes.insert(0, client) if len(self._amt_yes)", "players in game to vote.') self._vote = None return #", "integer') return if gt in amodes: cparams = 'scr_' +", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #", "self._vote == 'scorelimit': self.confirmScoreLimit() elif self._vote == 'timelimit': self.confirmTimeLimit() elif", "= self.checkIfAllowed(client, self._vote) if not v: client.message('You do not have", "current vote') return # Check if the player already voted", "elif self._vote == 'friendlyfire': self.confirmFriendlyFire() elif self._vote == 'killcam': self.confirmKillCam()", "== 'nextmap': q = self.mapvote(client, self._value) if not q: self.debug('Vote", "GNU General Public License # along with this program; if", "- vote to change setting or cvar on server. 
\"\"\"", "setting = self._value if self._value == 'on': setting = 1", "permission to call vote type v = self.checkIfAllowed(client, self._vote) if", "ValueError: now = self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting being voted for is not", "This will toggle friendly fire on and off setting =", "if self.config.has_section('votemaps'): for (mapname, consolename) in self.config.items('votemaps'): if mapname: self._aMaps[mapname]", "Voted value is not integer') return if gt in amodes:", "is ok. Broadcast to server self.sendBroadcast() # Start timer self.voteTimer()", "'admin': 40, 'fulladmin': 60, 'senioradmin': 80, 'superadmin': 100} for (entry,", "level, func, alias) # Re-deploy commands for consideration of this", "player know that vote is registered client.message('^3Your vote has been", "mapname = self.console.getNextMap() if mapname: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' %", "= None return else: client.message('^2You do not have permission to", "config _aMaps = {} # All vote allowed maps. Imported", "Use ^2!allvotes ^7for available votes.') self._vote = None return #", "in progress _value = None # Stores the value of", "vPass: self.confirmVote() def cmd_no(self, data, client=None, cmd=None): \"\"\"\\ Vote NO", "len(data) == 2: type = data[0] value = data[1] self._vote", "def cmd_yes(self, data, client, cmd=None): \"\"\"\\ Vote yes to the", "client.message('^1ABORT^7: Not enough players in game to vote.') self._vote =", "limit to ^2$s^7?\", 'timelimit': \"Timelimit vote in progress. 
Change time", "client.message('^3Your vote has been entered') # Check if majority of", "self._mapRequested: self.confirmMap() else: self.console.rotateMap() def cmd_veto(self, data, client, cmd=None): \"\"\"\\", "self._vote == 'maprotate': if self._mapRequested: self.confirmMap() else: self.console.rotateMap() elif self._vote", "20, self.cmd_maprotate, None) self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes, None) # Register", "'_roundlimit' self.console.setCvar(cparams, setting) else: self.debug('Could not set round limit as", "cmd): func = getattr(self, cmd) return func return None #########################", "self.console.getCvar('scr_team_fftype').getInt() if now >= 1: setting = 0 elif now", "Vote: %s, Value: %s' % (self._vote, self._value)) self._vote = None", "rounds') def getGameType(self): gametype = self.console.getCvar('g_gametype').getString() if gametype: return gametype", "gt = self.getGameType() if not isinstance(setting, int): try: setting =", "None return else: client.message('^2You do not have permission to call", "self._mapRequested = None def confirmMaprestart(self): # This will restart the", "if 'commands' in self.config.sections(): for cmd in self.config.options('commands'): level =", "%s' % now) if now == 0: setting = 1", "def onGameEnd(self, event): \"\"\" Handle EVT_GAME_ROUND_END \"\"\" if self._mapRequested: self.confirmMap()", "'killcam': self.confirmKillCam() elif self._vote == 'scorelimit': self.confirmScoreLimit() elif self._vote ==", "% self._votetime) # Load votemaps section if self.config.has_section('votemaps'): for (mapname,", "[] self._allplayers = [] def denyVote(self): if self._vote: self.console.say('^3Vote failed!')", "- Added \"!vote maps\" to show what maps can be", "match) return False elif len(match) == 0: client.message('^1ABORTED!^7No maps matching", "elif len(match) > 1: match = (', ').join(match) client.message('^1ABORTED!^7Multiple matches:", "passed. 
Kicking %s' % s.name) s.kick('Voted against', '', None, True,", "You already voted!') return elif client not in self._amt_yes or", "= { 'tovote': '^7Use ^2!yes ^7or ^2!no ^7 to vote',", "if gt in amodes: cparams = 'scr_' + gt +", "return False ################################################################################# # COMMANDS # ################################################################################# def cmd_vote(self, data,", "do not have rounds') def getGameType(self): gametype = self.console.getCvar('g_gametype').getString() if", "= {'s': a} self.console.say(self.getMessage(self._vote, param)) self.console.say(self.getMessage('tovote')) def aquireCmdLock2(self, cmd, client,", "type not allowed. Use ^2!allvotes ^7for available votes.') self._vote =", "value of the vote _votetime = 30 # Time before", "\"votemaps\" section in config _amt_yes = [] # Amount of", "int(setting) except ValueError: self.debug('ERROR: Could not set new timelimit. Voted", "= int(value) self._aVotes[entry.lower()] = value except ValueError: self._aVotes[entry.lower()] = adLvl[value]", "# v1.0.2 - Added \"!vote maps\" to show what maps", "b3.events class CodvotePlugin(b3.plugin.Plugin): adminPlugin = None _vote = None #", "== 'maprestart': self.confirmMaprestart() elif self._vote == 'friendlyfire': self.confirmFriendlyFire() elif self._vote", "progress. Change round length to ^2$s^7?\", 'roundlimit': \"Round limit vote", "self._votetime = self.config.getint('settings', 'votetime') except: self.debug('Unable to get [votetime] from", "vote in progress. 
Go to next map?\", 'maprestart': \"Maprestart vote", "client.message('No vote in progress') return # Check if player is", "adminPlugin = None _vote = None # Stores which vote", "not, write to the Free Software # Foundation, Inc., 51", "self.console.setCvar(cparams, setting) else: self.debug('Could not set round limit as gametype", "'off': setting = 0 else: self.debug('Unknown wanted setting for Friendlyfire.", "data, client, cmd=None): \"\"\"\\ !vote <setting> <value> - vote to", "data[0] == 'maprestart' or len(data) == 1 and data[0] ==", "not isinstance(setting, int): try: setting = int(setting) except ValueError: self.debug('Could", "# CoDVote plugin for BigBrotherBot(B3) (www.bigbrotherbot.net) # Copyright (C) 2015", "!= 'maprestart' and a != 'maprotate': param = {'s': a}", "= match[0] return True elif len(match) > 1: match =", "+= 1 self._allplayers.insert(0, c) if playersInGame <= 1 and client.maxLevel", "will restart the current map self.console.write('fast_restart') def confirmFriendlyFire(self): # This", "None self._value = None return # Check if enough players", "value else: client.message('^1ERROR^7: Invalid usage. 
Type ^2!help vote ^7for info')", "None # Stores the value of the vote _votetime =", "self._vote = None return # Check if type of vote", "player will be kicked if vote passed _default_messages = {", "for k in self._aVotes.keys(): if client.maxLevel >= self._aVotes[k]: allowed.insert(0, k)", "version 2 of the License, or # (at your option)", "self._vote == 'map' or self._vote == 'nextmap': q = self.mapvote(client,", "== 0: setting = 1 self.console.setCvar('scr_team_fftype', int(setting)) def confirmKillCam(self): #", "find admin plugin') return # Register commands if 'commands' in", "if self._kickRequested.maxLevel >= 20: client.message('^1ABORTED^7: Cannot vote to kick admin!')", "self._mapRequested: self.confirmMap() self._mapRequested = None ############### CONFIRM VOTES ###################### def", "== 'maprestart' or len(data) == 1 and data[0] == 'maps':", "== 1 and data[0] == 'maprestart' or len(data) == 1", "self._vote == 'nextmap': q = self.mapvote(client, self._value) if not q:", "vote') return # Check if the player already voted if", "# Load settings section try: self._votetime = self.config.getint('settings', 'votetime') except:", "not in self._amt_no: self._amt_no.insert(0, client) # Let player know that", "_votetime = 30 # Time before a vote will be", "'kick': self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client) if self._kickRequested: if self._kickRequested.maxLevel >=", "= None # Stores the value of the vote _votetime", "to ^3$s^7?\", 'nextmap': \"Next map vote in progress. Change next", "for (entry, value) in self.config.items('votes'): try: value = int(value) self._aVotes[entry.lower()]", "^2%s' % self._mapRequested.title()) return mapname = self.console.getNextMap() if mapname: cmd.sayLoudOrPM(client,", "version. # # This program is distributed in the hope", "# GNU General Public License for more details. 
# #", "'timelimit': self.confirmTimeLimit() elif self._vote == 'roundlength': self.confirmRoundLength() elif self._vote ==", "self._value amodes = ['ctf', 'sd', 're', 'bas', 'dom'] gt =", "write is scr_<gametype>_scorelimit <number> setting = self._value gt = self.getGameType()", "0 self.console.setCvar('scr_game_allowkillcam', int(setting)) def confirmScoreLimit(self): # CVAR to write is", "vote message to server. a = self._value if a ==", "None self._value = None self._amt_no = [] self._amt_yes = []", "# Check if we have enough data for vote data", "^2%s' % mapname) else: client.message('^1Error:^7 could not get map list')", "progress \"\"\" # Check if there is a vote in", "b3, threading import b3.plugin import b3.events class CodvotePlugin(b3.plugin.Plugin): adminPlugin =", "Map: ^2%s' % mapname) else: client.message('^1Error:^7 could not get map", "# COMMANDS # ################################################################################# def cmd_vote(self, data, client, cmd=None): \"\"\"\\", "60, True): client.message('^7Do not spam commands') return if self._mapRequested: cmd.sayLoudOrPM(client,", "if client.maxLevel >= 20: return True elif cmd.time + 5", "> 1: match = (', ').join(match) client.message('^1ABORTED!^7Multiple matches: %s' %", "call any votes') def cmd_yes(self, data, client, cmd=None): \"\"\"\\ Vote", "# Re-deploy commands for consideration of this plugin self.adminPlugin.registerCommand(self, 'nextmap',", "mapvote(self, client, wantedMap): # Find if map is in allowed", "c in self.console.clients.getList(): if c.team != b3.TEAM_SPEC: playersInGame += 1", "# Load votes section if self.config.has_section('votes'): adLvl = {'guest': 0,", "in maplist.iteritems(): if partial in mapname: a.append(mapname) elif partial in", "== 'off': setting = 0 if not isinstance(setting, int): try:", "been entered') # Check if majority of players voted vNo", "General Public License for more details. 
# # You should", "enough data for vote data = data.split() if len(data) ==", "= value else: client.message('^1ERROR^7: Invalid usage. Type ^2!help vote ^7for", "<= 1 and client.maxLevel < 100: client.message('^1ABORT^7: Not enough players", "= [] self._amt_yes = [] self._allplayers = [] def denyVote(self):", "== 0: client.message('You are not allowed to call any votes')", "in game _amt_no = [] _allplayers = [] # Amount", "to server. a = self._value if a == 'maprestart' or", "import b3.events class CodvotePlugin(b3.plugin.Plugin): adminPlugin = None _vote = None", "maps. mapvote turned out false') self._vote = None return if", "+ 5 <= self.console.time(): return True else: return False def", "cmd_veto(self, data, client, cmd=None): \"\"\"\\ Cancel a vote in progress", "Set person who called vote as yes vote self._amt_yes.insert(0, client)", "'timelimit': \"Timelimit vote in progress. Change time limit to ^2$s^7?\",", "progress. Change score limit to ^2$s^7?\", 'timelimit': \"Timelimit vote in", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "Stores which map is being voted for _kickRequested = None", "need: client.kick(reason, keyword, admin, silent=True/False, data) s = self._kickRequested self.debug('Kick", "= 'cmd_%s' % cmd if hasattr(self, cmd): func = getattr(self,", "limit vote in progress. 
Change round limit to ^2$s^7?\", }", "def aquireCmdLock2(self, cmd, client, delay, all=True): if client.maxLevel >= 20:", "if client in self._amt_yes or client in self._amt_no: client.message('Are you", "self.getCmd(cmd) if func: self.adminPlugin.registerCommand(self, cmd, level, func, alias) # Re-deploy", "return # Register commands if 'commands' in self.config.sections(): for cmd", "level = self.config.get('commands', cmd) sp = cmd.split('-') alias = None", "kick admin!') self._vote = None self._value = None self._kickRequested =", "self._value == 'off': setting = 0 else: self.debug('Unknown wanted setting", "a} self.console.say(self.getMessage(self._vote, param)) self.console.say(self.getMessage('tovote')) def aquireCmdLock2(self, cmd, client, delay, all=True):", "len(data) == 1 and data[0] == 'maps': self._vote = data[0]", "person to kick') self._vote = None self._value = None self._kickRequested", "self._vote = None self._value = None self._kickRequested = None return", "kick') self._vote = None self._value = None self._kickRequested = None", "not get map list') def cmd_maprotate(self, data, client, cmd=None): \"\"\"\\", "progress. Change round limit to ^2$s^7?\", } def onStartup(self): self.adminPlugin", "None if len(sp) == 2: cmd, alias = sp func", "'fulladmin': 60, 'senioradmin': 80, 'superadmin': 100} for (entry, value) in", "self.debug('Unknown wanted setting for Friendlyfire. Toggling to next mode') now", "len(self._amt_yes) vPass = len(self._allplayers) / 2 if vYes > vPass:", "a.append(mapname) return a def mapvote(self, client, wantedMap): # Find if", "players in game _mapRequested = None # Stores which map", "[votetime] from settings. Using default: %s' % self._votetime) # Load", "map?\", 'maprestart': \"Maprestart vote in progress. Restart current map?\", 'friendlyfire':", "setting = 0 elif now == 0: setting = 1", "self._amt_yes or client in self._amt_no: client.message('Are you drunk? 
You already", "Response is %s' % gametype) return False def sendBroadcast(self): #", "Vote yes to the vote in progress \"\"\" # Check", "self._vote: client.message('^1ERROR^7: Vote already in progress') return # Check if", "None # Stores which map is being voted for _kickRequested", "'maprotate' or len(data) == 1 and data[0] == 'maprestart' or", "= ['ctf', 'sd', 're', 'bas', 'dom'] gt = self.getGameType() if", "if not isinstance(setting, int): try: setting = int(setting) except ValueError:", "and a != 'maprotate': param = {'s': a} self.console.say(self.getMessage(self._vote, param))", "0: client.message('You are not allowed to call any votes') def", "Load votemaps section if self.config.has_section('votemaps'): for (mapname, consolename) in self.config.items('votemaps'):", "modify # it under the terms of the GNU General", "'bas', 'dom'] gt = self.getGameType() if not isinstance(setting, int): try:", "self.adminPlugin.findClientPrompt(self._value, client) if self._kickRequested: if self._kickRequested.maxLevel >= 20: client.message('^1ABORTED^7: Cannot", "data, client=None, cmd=None): \"\"\"\\ - list the next map in", "Change map to ^3$s^7?\", 'nextmap': \"Next map vote in progress.", "timelimit. Voted value is not integer') return cparams = 'scr_'", "vote is registered client.message('^3Your vote has been entered') # Check", "% cmd if hasattr(self, cmd): func = getattr(self, cmd) return", "allowed.insert(0, k) if len(allowed) > 0: p = sorted(allowed) x", "+ gt + '_roundlimit' self.console.setCvar(cparams, setting) else: self.debug('Could not set", "def confirmKillCam(self): # rcon for killcam: scr_game_allowkillcam - 0 or", "gametype. 
Response is %s' % gametype) return False def sendBroadcast(self):", "confirmRoundLimit(self): setting = self._value amodes = ['ctf', 'sd', 're', 'bas',", "_mapRequested = None # Stores which map is being voted", "').join(p) client.message('Allowed votes are: %s' % x) elif len(allowed) ==", "x = (', ').join(p) client.message('Allowed votes are: %s' % x)", "self._value = match[0] return True elif len(match) > 1: match", "= None self._value = None self._kickRequested = None return self._value", "to vote if client not in self._allplayers: client.message('Sorry, you cannot", "map votes') self._vote = None self._value = None return #", "vote is allowed if self._vote not in self._aVotes: client.message('Vote type", "remaining in progress if requirements for vote unmet. # v1.0.2", "\"Map vote in progress: Change map to ^3$s^7?\", 'nextmap': \"Next", "v1 = self.checkIfAllowed(client, 'map') v2 = self.checkIfAllowed(client, 'nextmap') if v1", "yes. Checked against amount of players in game _amt_no =", "in self._amt_no: self._amt_yes.insert(0, client) # Let player know that vote", "= [] self._allplayers = [] def denyVote(self): if self._vote: self.console.say('^3Vote", "delay, all=True): if client.maxLevel >= 20: return True elif cmd.time", "vote already in progress if self._vote: client.message('^1ERROR^7: Vote already in", "- list the next map in rotation \"\"\" if not", "when needed. self.console.write('map %s' % self._aMaps[self._mapRequested]) self._mapRequested = None def", "if len(allowed) > 0: p = sorted(allowed) x = (',", "setting = 0 if not isinstance(setting, int): try: setting =", "None # Stores which vote is currently in progress _value", "v: client.message('You do not have permission to call this vote')", "# - Fixed issue where person who called vote needed", "return # Check if type of vote is allowed if", "client in self._amt_no: client.message('Are you drunk? 
You already voted!') return", "enter current vote') return # Check if the player already", "= 30 # Time before a vote will be canceled", "Stores the value of the vote _votetime = 30 #", "try: value = int(value) self._aVotes[entry.lower()] = value except ValueError: self._aVotes[entry.lower()]", "data[0] elif len(data) == 2: type = data[0] value =", "= int(setting) except ValueError: self.debug('Could not set new round limit.", "in rotation \"\"\" if not self.aquireCmdLock2(cmd, client, 60, True): client.message('^7Do", "self._value if a == 'maprestart' or a == 'maprotate': self.console.say(self.getMessage(self._vote))", "and data[0] == 'maprotate' or len(data) == 1 and data[0]", "Load settings section try: self._votetime = self.config.getint('settings', 'votetime') except: self.debug('Unable", "ValueError: self.debug('ERROR: Could not set new scorelimit. Voted value is", "in config _aMaps = {} # All vote allowed maps.", "= 'scr_' + gt + '_roundlimit' self.console.setCvar(cparams, setting) else: self.debug('Could", "NO to the current vote \"\"\" # Check if there", "== 1 and data[0] == 'maps': self._vote = data[0] self._value", "setting) def confirmRoundLimit(self): setting = self._value amodes = ['ctf', 'sd',", "None return # Get further info for proper processing if", "def confirmScoreLimit(self): # CVAR to write is scr_<gametype>_scorelimit <number> setting", ">= self._aVotes[k]: allowed.insert(0, k) if len(allowed) > 0: p =", "= type self._value = value else: client.message('^1ERROR^7: Invalid usage. Type", "if client not in self._allplayers: client.message('Sorry, you cannot enter current", "is allowed to vote if client not in self._allplayers: client.message('Sorry,", "game to vote and store present players. 
Only players present", "self.denyVote() def cmd_nextmap(self, data, client=None, cmd=None): \"\"\"\\ - list the", "Find if map is in allowed list match = self._search(self._aMaps,", "are allowed to call \"\"\" allowed = [] for k", "not in self._amt_yes or client not in self._amt_no: self._amt_yes.insert(0, client)", "that vote is registered client.message('^3Your vote has been entered') #", "= threading.Timer((self._votetime - 5), self.voteMessage) t1.start() def voteMessage(self): if self._vote:", "for cmd in self.config.options('commands'): level = self.config.get('commands', cmd) sp =", "event): \"\"\" Handle EVT_GAME_ROUND_END \"\"\" if self._mapRequested: self.confirmMap() self._mapRequested =", "\"\"\"\\ Vote yes to the vote in progress \"\"\" #", "self.console.rotateMap() elif self._vote == 'maprestart': self.confirmMaprestart() elif self._vote == 'friendlyfire':", "of the vote _votetime = 30 # Time before a", "Cycle to next map in rotation \"\"\" if self._mapRequested: self.confirmMap()", "self._value = self._kickRequested.name else: self.debug('could not get the person to", "if mapname: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % mapname) else: client.message('^1Error:^7", "'roundlength': \"Round length vote in progress. Change round length to", "0 elif now == 0: setting = 1 self.console.setCvar('scr_team_fftype', int(setting))", "mapname: self._aMaps[mapname] = consolename self.debug('Successfully entered maps for voting: %s'", "type v = self.checkIfAllowed(client, self._vote) if not v: client.message('You do", "'Vote enabled maps: ^2%s' % (('^7, ^2').join(self._aMaps.keys()))) self._vote = None", "data, client=None, cmd=None): \"\"\"\\ Vote NO to the current vote", "you are allowed to call \"\"\" allowed = [] for", "except ValueError: now = self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting being voted for is", "wil broadcast vote message to server. 
a = self._value if", "self._vote) if not v: client.message('You do not have permission to", "client, cmd=None): \"\"\"\\ Vote yes to the vote in progress", "info') return # Check if player is asking what maps", "= data[0] elif len(data) == 2: type = data[0] value", "== 'map' or self._vote == 'nextmap': q = self.mapvote(client, self._value)", "'maprestart' and a != 'maprotate': param = {'s': a} self.console.say(self.getMessage(self._vote,", "20: return True elif cmd.time + 5 <= self.console.time(): return", "who voted yes. Checked against amount of players in game", "self.console.setCvar(cparams, setting) def confirmTimeLimit(self): setting = self._value gt = self.getGameType()", "= self.console.getNextMap() if mapname: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % mapname)", "not spam commands') return if self._mapRequested: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s'", "broadcast vote message to server. a = self._value if a", "or len(data) == 1 and data[0] == 'maps': self._vote =", "self.debug('ERROR: Could not set new timelimit. Voted value is not", "2 if vNo > vPass: self.denyVote() def cmd_nextmap(self, data, client=None,", "None) # Register events self.registerEvent('EVT_GAME_EXIT', self.onGameEnd) def onLoadConfig(self): # Load", "the vote in progress \"\"\" # Check if there is", "if self.config.has_section('votes'): adLvl = {'guest': 0, 'user': 1, 'reg': 2,", "it will be useful, # but WITHOUT ANY WARRANTY; without", "cycle to next map when needed. self.console.write('map %s' % self._aMaps[self._mapRequested])", "gametype: return gametype else: self.debug('Error getting gametype. Response is %s'", "will be kicked if vote passed _default_messages = { 'tovote':", "% s.name) s.kick('Voted against', '', None, True, '') self._kickRequested =", "enough players in game to vote and store present players.", "section if self.config.has_section('votes'): adLvl = {'guest': 0, 'user': 1, 'reg':", "ValueError: self.debug('ERROR: Could not set new timelimit. 
Voted value is", "if player is allowed to vote if client not in", "vote') return # Check if the player already voted. If", "# it under the terms of the GNU General Public", "limit. Voted value is not integer') return if gt in", "<number> setting = self._value gt = self.getGameType() if not isinstance(setting,", "hope that it will be useful, # but WITHOUT ANY", "t1.start() def voteMessage(self): if self._vote: self.console.say('^110 seconds until vote end!')", "== 0: client.message('^1ABORTED!^7No maps matching your request') return False ###############", "do not have permission to call map votes') self._vote =", "self.confirmVote() def cmd_no(self, data, client=None, cmd=None): \"\"\"\\ Vote NO to", "cmd) return func return None ######################### VOTE TIMING ############################## def", "self._amt_no = [] self._amt_yes = [] self._allplayers = [] def", "any votes') def cmd_yes(self, data, client, cmd=None): \"\"\"\\ Vote yes", "if client.maxLevel >= self._aVotes[k]: allowed.insert(0, k) if len(allowed) > 0:", "vote will be canceled for not passing _aVotes = {}", "self._mapRequested.title()) return mapname = self.console.getNextMap() if mapname: cmd.sayLoudOrPM(client, '^7Next Map:", "<setting> <value> - vote to change setting or cvar on", "elif self._vote == 'scorelimit': self.confirmScoreLimit() elif self._vote == 'timelimit': self.confirmTimeLimit()", "== 'on': setting = 1 elif self._value == 'off': setting", "round limit to ^2$s^7?\", } def onStartup(self): self.adminPlugin = self.console.getPlugin('admin')", "self._aMaps[self._mapRequested]) self._mapRequested = None def confirmMaprestart(self): # This will restart", "%s' % self._aMaps) # Load votes section if self.config.has_section('votes'): adLvl", "gt in amodes: cparams = 'scr_' + gt + '_roundlimit'", "usage. 
Type ^2!help vote ^7for info') return # Check if", "%s' % (self._vote, self._value)) self._vote = None self._value = None", "list') def cmd_maprotate(self, data, client, cmd=None): \"\"\"\\ Cycle to next", "getGameType(self): gametype = self.console.getCvar('g_gametype').getString() if gametype: return gametype else: self.debug('Error", "until vote end!') t2 = threading.Timer(10, self.denyVote) t2.start() ######################### MAP", "% self._mapRequested.title()) return mapname = self.console.getNextMap() if mapname: cmd.sayLoudOrPM(client, '^7Next", "not get the person to kick') self._vote = None self._value", "progress \"\"\" if self._vote: client.message('^3Vote canceled') self.denyVote() elif not self._vote:", "k in self._aVotes.keys(): if client.maxLevel >= self._aVotes[k]: allowed.insert(0, k) if", "General Public License # along with this program; if not,", "# Note - to kick someone we need: client.kick(reason, keyword,", "already voted!') return elif client not in self._amt_yes or client", "= None return # Seems like vote is ok. 
Broadcast", "client.message('^1ABORTED!^7No maps matching your request') return False ############### NEXTMAP FUNCTIONING", "= consolename self.debug('Successfully entered maps for voting: %s' % self._aMaps)", "= self._search(self._aMaps, wantedMap) if len(match) == 1: self._mapRequested = match[0]", "of players voted vNo = len(self._amt_no) vPass = len(self._allplayers) /", "= self.checkIfAllowed(client, 'map') v2 = self.checkIfAllowed(client, 'nextmap') if v1 or", "client.message('^7Do not spam commands') return if self._mapRequested: cmd.sayLoudOrPM(client, '^7Next Map:", "self.onGameEnd) def onLoadConfig(self): # Load settings section try: self._votetime =", "in rotation \"\"\" if self._mapRequested: self.confirmMap() else: self.console.rotateMap() def cmd_veto(self,", "data, client, cmd=None): \"\"\"\\ Cycle to next map in rotation", "= [] def confirmKick(self): # Note - to kick someone", "== 'maps': v1 = self.checkIfAllowed(client, 'map') v2 = self.checkIfAllowed(client, 'nextmap')", "self._amt_yes = [] self._allplayers = [] def confirmKick(self): # Note", "PURPOSE. See the # GNU General Public License for more", "self._vote = None self._value = None self._amt_no = [] self._amt_yes", "except ValueError: self.debug('ERROR: Could not set new scorelimit. Voted value", "data for vote data = data.split() if len(data) == 1", "Free Software Foundation; either version 2 of the License, or", "= '1.0.2' __author__ = 'ph03n1x' import b3, threading import b3.plugin", "1, self.cmd_nextmap, 'nm') self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate, None) self.adminPlugin.registerCommand(self, 'allvotes',", "current vote') return # Check if the player already voted.", "self.config.getint('settings', 'votetime') except: self.debug('Unable to get [votetime] from settings. 
Using", "%s' % x) elif len(allowed) == 0: client.message('You are not", "redistribute it and/or modify # it under the terms of", "int(setting) except ValueError: self.debug('Could not set new round limit. Voted", "matches: %s' % match) return False elif len(match) == 0:", "None def confirmMaprestart(self): # This will restart the current map", "elif self._vote == 'maprestart': self.confirmMaprestart() elif self._vote == 'friendlyfire': self.confirmFriendlyFire()", "players in game _amt_no = [] _allplayers = [] #", "to kick someone we need: client.kick(reason, keyword, admin, silent=True/False, data)", "client, cmd=None): \"\"\"\\ Cancel a vote in progress \"\"\" if", "_search(self, maplist, partial): a = [] for mapname, consolename in", "if partial in mapname: a.append(mapname) elif partial in consolename: a.append(mapname)", "by # the Free Software Foundation; either version 2 of", "if self._vote == 'maps': v1 = self.checkIfAllowed(client, 'map') v2 =", "int): try: setting = int(setting) except ValueError: self.debug('ERROR: Could not", "info for proper processing if self._vote == 'map' or self._vote", "Not enough players in game to vote.') self._vote = None", "= len(self._allplayers) / 2 if vNo > vPass: self.denyVote() def", "== 1 and data[0] == 'maprotate' or len(data) == 1", "'maprotate': \"Rotate map vote in progress. 
Go to next map?\",", "value) in self.config.items('votes'): try: value = int(value) self._aVotes[entry.lower()] = value", "self._votetime) # Load votemaps section if self.config.has_section('votemaps'): for (mapname, consolename)", "have permission to call map votes') self._vote = None self._value", "integer') return cparams = 'scr_' + gt + '_scorelimit' self.console.setCvar(cparams,", "the hope that it will be useful, # but WITHOUT", "ValueError: self._aVotes[entry.lower()] = adLvl[value] self.debug('Allowed votes are: %s' % self._aVotes)", "in self.config.sections(): for cmd in self.config.options('commands'): level = self.config.get('commands', cmd)", "partial in consolename: a.append(mapname) return a def mapvote(self, client, wantedMap):", "vote in progress if not self._vote: client.message('No vote in progress')", "vote in progress \"\"\" if self._vote: client.message('^3Vote canceled') self.denyVote() elif", "in self._aVotes: client.message('Vote type not allowed. Use ^2!allvotes ^7for available", "not in self._aVotes: client.message('Vote type not allowed. Use ^2!allvotes ^7for", "# Check if player is asking what maps can be", "against', '', None, True, '') self._kickRequested = None def confirmMap(self):", "def confirmFriendlyFire(self): # This will toggle friendly fire on and", "= 'scr_' + gt + '_timelimit' self.console.setCvar(cparams, setting) def confirmRoundLength(self):", "self._value = value else: client.message('^1ERROR^7: Invalid usage. Type ^2!help vote", "'nextmap': self.debug('nextmap vote passed. Params already stored') elif self._vote ==", "message to server. 
a = self._value if a == 'maprestart'", "'^7Next Map: ^2%s' % mapname) else: client.message('^1Error:^7 could not get", "Software Foundation; either version 2 of the License, or #", "self._vote: self.console.say('^110 seconds until vote end!') t2 = threading.Timer(10, self.denyVote)", "= 'ph03n1x' import b3, threading import b3.plugin import b3.events class", "or 1 setting = self._value if self._value == 'on': setting", "# rcon for killcam: scr_game_allowkillcam - 0 or 1 setting", "return self._value = self._kickRequested.name else: self.debug('could not get the person", "well. Changed to automatic yes vote. __version__ = '1.0.2' __author__", "'nextmap': q = self.mapvote(client, self._value) if not q: self.debug('Vote aborted:", "new scorelimit. Voted value is not integer') return cparams =", "is scr_<gametype>_scorelimit <number> setting = self._value gt = self.getGameType() if", "= threading.Timer(10, self.denyVote) t2.start() ######################### MAP HANDLING ############################## def _search(self,", "onGameEnd(self, event): \"\"\" Handle EVT_GAME_ROUND_END \"\"\" if self._mapRequested: self.confirmMap() self._mapRequested", "client not in self._amt_no: self._amt_yes.insert(0, client) # Let player know", "present players. Only players present at vote call can vote", "distributed in the hope that it will be useful, #", "onLoadConfig(self): # Load settings section try: self._votetime = self.config.getint('settings', 'votetime')", "cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % mapname) else: client.message('^1Error:^7 could not", "License, or # (at your option) any later version. 
#", "== 2: cmd, alias = sp func = self.getCmd(cmd) if", "not set round limit as gametype do not have rounds')", "getattr(self, cmd) return func return None ######################### VOTE TIMING ##############################", "cparams = 'scr_' + gt + '_scorelimit' self.console.setCvar(cparams, setting) def", "Register commands if 'commands' in self.config.sections(): for cmd in self.config.options('commands'):", "^2').join(self._aMaps.keys()))) self._vote = None self._value = None return else: client.message('^2You", "from \"votes\" section in config _aMaps = {} # All", "adLvl = {'guest': 0, 'user': 1, 'reg': 2, 'mod': 20,", "if now >= 1: setting = 0 elif now ==", "there is a vote in progress if not self._vote: client.message('No", "allowed to call any votes') def cmd_yes(self, data, client, cmd=None):", "vote needed to vote as well. Changed to automatic yes", "not have permission to call this vote') self._vote = None", "plugin for BigBrotherBot(B3) (www.bigbrotherbot.net) # Copyright (C) 2015 ph03n1x #", "You should have received a copy of the GNU General", "None self._kickRequested = None return self._value = self._kickRequested.name else: self.debug('could", "def mapvote(self, client, wantedMap): # Find if map is in", "server. a = self._value if a == 'maprestart' or a", "friendly fire on and off setting = self._value if not", "vote') self._vote = None return # Get further info for", "progress. Change time limit to ^2$s^7?\", 'roundlength': \"Round length vote", "to ^2$s^7?\", 'killcam': \"Killcam vote in progress. Turn killcam ^2$s^7?\",", "vote in progress. Turn killcam ^2$s^7?\", 'scorelimit': \"Scorelimit vote in", "'sd', 're', 'bas', 'dom'] gt = self.getGameType() if not isinstance(setting,", "% now) if now == 0: setting = 1 else:", "self.debug('Could not set new round limit. 
Voted value is not", "== 'kick': self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client) if self._kickRequested: if self._kickRequested.maxLevel", "you can redistribute it and/or modify # it under the", "already in progress') return # Check if we have enough", "limit to ^2$s^7?\", 'roundlength': \"Round length vote in progress. Change", "try: setting = int(setting) except ValueError: self.debug('Could not set new", "self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate, None) self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes, None)", "MAP HANDLING ############################## def _search(self, maplist, partial): a = []", "for is not valid. Toggling to next mode. Killcam currently:", "int(setting) except ValueError: now = self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting being voted for", "cmd=None): \"\"\"\\ - list the next map in rotation \"\"\"", "= None self._kickRequested = None return self._value = self._kickRequested.name else:", "in progress. 
Restart current map?\", 'friendlyfire': \"Friendlyfire vote in progress.", "self._kickRequested.maxLevel >= 20: client.message('^1ABORTED^7: Cannot vote to kick admin!') self._vote", "'maprestart' or a == 'maprotate': self.console.say(self.getMessage(self._vote)) elif a != 'maprestart'", "elif cmd.time + 5 <= self.console.time(): return True else: return", "to the current vote \"\"\" # Check if there is", "######################### MAP HANDLING ############################## def _search(self, maplist, partial): a =", "= [] for mapname, consolename in maplist.iteritems(): if partial in", "None return self._value = self._kickRequested.name else: self.debug('could not get the", "1 setting = self._value if self._value == 'on': setting =", "cmd if hasattr(self, cmd): func = getattr(self, cmd) return func", "consideration of this plugin self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap, 'nm') self.adminPlugin.registerCommand(self,", "voted yes. Checked against amount of players in game _amt_no", "to automatic yes vote. __version__ = '1.0.2' __author__ = 'ph03n1x'", "'commands' in self.config.sections(): for cmd in self.config.options('commands'): level = self.config.get('commands',", "if self._mapRequested: self.confirmMap() else: self.console.rotateMap() elif self._vote == 'maprestart': self.confirmMaprestart()", "map in rotation \"\"\" if not self.aquireCmdLock2(cmd, client, 60, True):", "self._aVotes[entry.lower()] = adLvl[value] self.debug('Allowed votes are: %s' % self._aVotes) def", "'maprestart': self.confirmMaprestart() elif self._vote == 'friendlyfire': self.confirmFriendlyFire() elif self._vote ==", "Cannot vote for maps. 
mapvote turned out false') self._vote =", "2015 ph03n1x # # This program is free software; you", "[] def confirmKick(self): # Note - to kick someone we", "= 'scr_' + gt + '_scorelimit' self.console.setCvar(cparams, setting) def confirmTimeLimit(self):", "person who called vote as yes vote self._amt_yes.insert(0, client) if", "return cparams = 'scr_' + gt + '_scorelimit' self.console.setCvar(cparams, setting)", "GNU General Public License as published by # the Free", "for _kickRequested = None # Stores which player will be", "= [] _allplayers = [] # Amount of players in", "None self._amt_no = [] self._amt_yes = [] self._allplayers = []", "data) s = self._kickRequested self.debug('Kick vote passed. Kicking %s' %", "################################################################################# def cmd_vote(self, data, client, cmd=None): \"\"\"\\ !vote <setting> <value>", "client) if self._kickRequested: if self._kickRequested.maxLevel >= 20: client.message('^1ABORTED^7: Cannot vote", "= 'scr_' + gt + '_roundlength' self.console.setCvar(cparams, setting) def confirmRoundLimit(self):", "vote to change setting or cvar on server. \"\"\" #", "2, 'mod': 20, 'admin': 40, 'fulladmin': 60, 'senioradmin': 80, 'superadmin':", "amodes: cparams = 'scr_' + gt + '_roundlimit' self.console.setCvar(cparams, setting)", "self.confirmMap() else: self.console.rotateMap() elif self._vote == 'maprestart': self.confirmMaprestart() elif self._vote", "+ '_scorelimit' self.console.setCvar(cparams, setting) def confirmTimeLimit(self): setting = self._value gt", "Change score limit to ^2$s^7?\", 'timelimit': \"Timelimit vote in progress.", "for c in self.console.clients.getList(): if c.team != b3.TEAM_SPEC: playersInGame +=", "'killcam': \"Killcam vote in progress. Turn killcam ^2$s^7?\", 'scorelimit': \"Scorelimit", "# This program is free software; you can redistribute it", "= {} # All vote allowed maps. 
Imported from \"votemaps\"", "self._allplayers.insert(0, c) if playersInGame <= 1 and client.maxLevel < 100:", "License as published by # the Free Software Foundation; either", "# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA", "'off': setting = 0 if not isinstance(setting, int): try: setting", "software; you can redistribute it and/or modify # it under", "if map is in allowed list match = self._search(self._aMaps, wantedMap)", "gametype = self.console.getCvar('g_gametype').getString() if gametype: return gametype else: self.debug('Error getting", "self._vote: client.message('^3Vote canceled') self.denyVote() elif not self._vote: client.message('^3No vote in", "1: setting = 0 elif now == 0: setting =", "killcam ^2$s^7?\", 'scorelimit': \"Scorelimit vote in progress. Change score limit", "param)) self.console.say(self.getMessage('tovote')) def aquireCmdLock2(self, cmd, client, delay, all=True): if client.maxLevel", "def cmd_veto(self, data, client, cmd=None): \"\"\"\\ Cancel a vote in", "for (mapname, consolename) in self.config.items('votemaps'): if mapname: self._aMaps[mapname] = consolename", "type of vote is allowed if self._vote not in self._aVotes:", "matching your request') return False ############### NEXTMAP FUNCTIONING ################ def", "hasattr(self, cmd): func = getattr(self, cmd) return func return None", "[] self._amt_yes = [] self._allplayers = [] def denyVote(self): if", "a != 'maprestart' and a != 'maprotate': param = {'s':", "int(setting)) def confirmKillCam(self): # rcon for killcam: scr_game_allowkillcam - 0", "players in game to vote and store present players. 
Only", "def confirmMaprestart(self): # This will restart the current map self.console.write('fast_restart')", "Foundation; either version 2 of the License, or # (at", "\"\"\" allowed = [] for k in self._aVotes.keys(): if client.maxLevel", "0 or 1 setting = self._value if self._value == 'on':", "def confirmRoundLength(self): setting = self._value amodes = ['ctf', 'sd', 're',", "'roundlimit': \"Round limit vote in progress. Change round limit to", "s.name) s.kick('Voted against', '', None, True, '') self._kickRequested = None", "Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # #", "Change round limit to ^2$s^7?\", } def onStartup(self): self.adminPlugin =", "change setting or cvar on server. \"\"\" # Check if", "= self._value if a == 'maprestart' or a == 'maprotate':", "'_scorelimit' self.console.setCvar(cparams, setting) def confirmTimeLimit(self): setting = self._value gt =", "len(self._allplayers) / 2 if vNo > vPass: self.denyVote() def cmd_nextmap(self,", "client, cmd=None): \"\"\"\\ !vote <setting> <value> - vote to change", "maps can be voted on if self._vote == 'maps': v1", "unmet. # v1.0.2 - Added \"!vote maps\" to show what", "client.message('^1ABORTED!^7Multiple matches: %s' % match) return False elif len(match) ==", "self._kickRequested: if self._kickRequested.maxLevel >= 20: client.message('^1ABORTED^7: Cannot vote to kick", "killcam: scr_game_allowkillcam - 0 or 1 setting = self._value if", "in self._amt_yes or client not in self._amt_no: self._amt_no.insert(0, client) #", "who called vote needed to vote as well. Changed to", "def checkIfAllowed(self, client, voteType): if client.maxLevel >= self._aVotes[voteType]: return True", "# Check if the player already voted. If not, register", "client.message('Are you drunk? 
You already voted!') return elif client not", "^2!help vote ^7for info') return # Check if player is", "have received a copy of the GNU General Public License", "Check if we have enough data for vote data =", "player already voted if client in self._amt_yes or client in", "b3.plugin import b3.events class CodvotePlugin(b3.plugin.Plugin): adminPlugin = None _vote =", "in the hope that it will be useful, # but", "= 0 self.console.setCvar('scr_game_allowkillcam', int(setting)) def confirmScoreLimit(self): # CVAR to write", "v = self.checkIfAllowed(client, self._vote) if not v: client.message('You do not", "which vote is currently in progress _value = None #", "= data[0] value = data[1] self._vote = type self._value =", "elif self._value == 'off': setting = 0 else: self.debug('Unknown wanted", "or client in self._amt_no: client.message('Are you drunk? You already voted!')", "+ gt + '_roundlength' self.console.setCvar(cparams, setting) def confirmRoundLimit(self): setting =", "the player already voted if client in self._amt_yes or client", "in self._amt_yes or client in self._amt_no: client.message('Are you drunk? You", "Get further info for proper processing if self._vote == 'map'", "votes') def cmd_yes(self, data, client, cmd=None): \"\"\"\\ Vote yes to", "self._vote = None return # Get further info for proper", "if the player already voted if client in self._amt_yes or", "Check if vote already in progress if self._vote: client.message('^1ERROR^7: Vote", "already voted if client in self._amt_yes or client in self._amt_no:", "'map': \"Map vote in progress: Change map to ^3$s^7?\", 'nextmap':", "return # Get further info for proper processing if self._vote", "Friendlyfire. Toggling to next mode') now = self.console.getCvar('scr_team_fftype').getInt() if now", "GNU General Public License for more details. 
# # You", "self.checkIfAllowed(client, 'map') v2 = self.checkIfAllowed(client, 'nextmap') if v1 or v2:", "client.maxLevel >= self._aVotes[k]: allowed.insert(0, k) if len(allowed) > 0: p", "in allowed list match = self._search(self._aMaps, wantedMap) if len(match) ==", "cmd, client, delay, all=True): if client.maxLevel >= 20: return True", "self.config.has_section('votes'): adLvl = {'guest': 0, 'user': 1, 'reg': 2, 'mod':", "20: client.message('^1ABORTED^7: Cannot vote to kick admin!') self._vote = None", "self._amt_yes or client not in self._amt_no: self._amt_no.insert(0, client) # Let", "# Amount of players in game _mapRequested = None #", "self.debug('Allowed votes are: %s' % self._aVotes) def getCmd(self, cmd): cmd", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "to ^3$s^7?\", 'kick': \"Kick vote in progress: Kick ^2$s^7?\", 'maprotate':", "vote ^7for info') return # Check if player is asking", "Params already stored') elif self._vote == 'kick': self.confirmKick() elif self._vote", "b3.TEAM_SPEC: playersInGame += 1 self._allplayers.insert(0, c) if playersInGame <= 1", "the current vote \"\"\" # Check if there is a", "elif self._vote == 'killcam': self.confirmKillCam() elif self._vote == 'scorelimit': self.confirmScoreLimit()", "in progress if self._vote: client.message('^1ERROR^7: Vote already in progress') return", "players present at vote call can vote playersInGame = 0", "now = self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting being voted for is not valid.", "can be voted on if self._vote == 'maps': v1 =", "self._amt_yes.insert(0, client) if len(self._amt_yes) > (len(self._allplayers) / 2): self.confirmVote() def", "self._amt_no: client.message('Are you drunk? You already voted!') return elif client", "turned out false') self._vote = None return if self._vote ==", "int): if self._value == 'on': setting = 1 elif self._value", "_aMaps = {} # All vote allowed maps. 
Imported from", "of vote is allowed if self._vote not in self._aVotes: client.message('Vote", "under the terms of the GNU General Public License as", "called vote needed to vote as well. Changed to automatic", "not in self._allplayers: client.message('Sorry, you cannot enter current vote') return", "[] for c in self.console.clients.getList(): if c.team != b3.TEAM_SPEC: playersInGame", "0: client.message('^1ABORTED!^7No maps matching your request') return False ############### NEXTMAP", "register vote if client in self._amt_yes or client in self._amt_no:", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "ok. Broadcast to server self.sendBroadcast() # Start timer self.voteTimer() #", "not allowed. Use ^2!allvotes ^7for available votes.') self._vote = None", "passing _aVotes = {} # All votes allowed. Imported from", "now >= 1: setting = 0 elif now == 0:", "map to ^3$s^7?\", 'nextmap': \"Next map vote in progress. Change", "^3$s^7?\", 'nextmap': \"Next map vote in progress. Change next map", "vPass = len(self._allplayers) / 2 if vNo > vPass: self.denyVote()", "map?\", 'friendlyfire': \"Friendlyfire vote in progress. 
Change friendlyfire mode to", "self._allplayers: client.message('Sorry, you cannot enter current vote') return # Check", "allowed = [] for k in self._aVotes.keys(): if client.maxLevel >=", "\"\"\" # Check if there is a vote in progress", "self._value = None self._kickRequested = None return self._value = self._kickRequested.name", "== 'maprestart' or a == 'maprotate': self.console.say(self.getMessage(self._vote)) elif a !=", "entered') # Check if majority of players voted already vYes", "^3$s^7?\", 'kick': \"Kick vote in progress: Kick ^2$s^7?\", 'maprotate': \"Rotate", "vYes = len(self._amt_yes) vPass = len(self._allplayers) / 2 if vYes", "CONFIRM VOTES ###################### def confirmVote(self): self.console.say('^3Vote passed!^7') if self._vote ==", "self.cmd_maprotate, None) self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes, None) # Register events", "def getCmd(self, cmd): cmd = 'cmd_%s' % cmd if hasattr(self,", "set new round limit. Voted value is not integer') return", "except ValueError: self.debug('ERROR: Could not set new round length. Voted", "> vPass: self.confirmVote() def cmd_no(self, data, client=None, cmd=None): \"\"\"\\ Vote", "MA 02110-1301 USA # # Changelog: # v1.0.1 - Fixed", "voteMessage(self): if self._vote: self.console.say('^110 seconds until vote end!') t2 =", "you cannot enter current vote') return # Check if the", "if hasattr(self, cmd): func = getattr(self, cmd) return func return", "Vote NO to the current vote \"\"\" # Check if", "CVAR to write is scr_<gametype>_scorelimit <number> setting = self._value gt", "self.debug('Unable to get [votetime] from settings. 
Using default: %s' %", "== 'maprotate' or len(data) == 1 and data[0] == 'maprestart'", "40, 'fulladmin': 60, 'senioradmin': 80, 'superadmin': 100} for (entry, value)", "self.console.setCvar(cparams, setting) def confirmRoundLimit(self): setting = self._value amodes = ['ctf',", "is a vote in progress if not self._vote: client.message('No vote", "list match = self._search(self._aMaps, wantedMap) if len(match) == 1: self._mapRequested", "A PARTICULAR PURPOSE. See the # GNU General Public License", "vPass = len(self._allplayers) / 2 if vYes > vPass: self.confirmVote()", "what maps can be voted on if self._vote == 'maps':", "1: self._mapRequested = match[0] self._value = match[0] return True elif", "published by # the Free Software Foundation; either version 2", "return if gt in amodes: cparams = 'scr_' + gt", "# This will cycle to next map when needed. self.console.write('map", "^2$s^7?\", 'roundlimit': \"Round limit vote in progress. Change round limit", "\"Round length vote in progress. Change round length to ^2$s^7?\",", "__version__ = '1.0.2' __author__ = 'ph03n1x' import b3, threading import", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "Imported from \"votemaps\" section in config _amt_yes = [] #", "in self._allplayers: client.message('Sorry, you cannot enter current vote') return #", "voted vNo = len(self._amt_no) vPass = len(self._allplayers) / 2 if", "################################################################################# # COMMANDS # ################################################################################# def cmd_vote(self, data, client, cmd=None):", "x) elif len(allowed) == 0: client.message('You are not allowed to", "a vote in progress if not self._vote: client.message('No vote in", "self.console.setCvar(cparams, setting) def confirmRoundLength(self): setting = self._value amodes = ['ctf',", "else: return False def checkIfAllowed(self, client, voteType): if client.maxLevel >=", "votes.') self._vote = None return # Check 
if player has", "as gametype do not have rounds') def getGameType(self): gametype =", "cparams = 'scr_' + gt + '_roundlength' self.console.setCvar(cparams, setting) def", "cmd_vote(self, data, client, cmd=None): \"\"\"\\ !vote <setting> <value> - vote", "self.debug('Kick vote passed. Kicking %s' % s.name) s.kick('Voted against', '',", "len(match) == 1: self._mapRequested = match[0] self._value = match[0] return", "2: cmd, alias = sp func = self.getCmd(cmd) if func:", "and data[0] == 'maprestart' or len(data) == 1 and data[0]", "False def checkIfAllowed(self, client, voteType): if client.maxLevel >= self._aVotes[voteType]: return", "- 0 or 1 setting = self._value if self._value ==", "'maprotate': self.console.say(self.getMessage(self._vote)) elif a != 'maprestart' and a != 'maprotate':", "section try: self._votetime = self.config.getint('settings', 'votetime') except: self.debug('Unable to get", "not isinstance(setting, int): if self._value == 'on': setting = 1", "= len(self._amt_yes) vPass = len(self._allplayers) / 2 if vYes >", "to show what maps can be called into vote. #", "type = data[0] value = data[1] self._vote = type self._value", "setting) else: self.debug('Could not set round limit as gametype do", "True elif cmd.time + 5 <= self.console.time(): return True else:", "length vote in progress. 
Change round length to ^2$s^7?\", 'roundlimit':", "Toggling to next mode') now = self.console.getCvar('scr_team_fftype').getInt() if now >=", "return else: client.message('^2You do not have permission to call map", "% match) return False elif len(match) == 0: client.message('^1ABORTED!^7No maps", "return mapname = self.console.getNextMap() if mapname: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s'", "asking what maps can be voted on if self._vote ==", "or self._vote == 'nextmap': q = self.mapvote(client, self._value) if not", "None self._value = None self._kickRequested = None return self._value =", "canceled for not passing _aVotes = {} # All votes", "c.team != b3.TEAM_SPEC: playersInGame += 1 self._allplayers.insert(0, c) if playersInGame", "plugin') return # Register commands if 'commands' in self.config.sections(): for", "for killcam: scr_game_allowkillcam - 0 or 1 setting = self._value", "= self.mapvote(client, self._value) if not q: self.debug('Vote aborted: Cannot vote", "in self.console.clients.getList(): if c.team != b3.TEAM_SPEC: playersInGame += 1 self._allplayers.insert(0,", "keyword, admin, silent=True/False, data) s = self._kickRequested self.debug('Kick vote passed.", "= None self._value = None return else: client.message('^2You do not", "data, client, cmd=None): \"\"\"\\ Vote yes to the vote in", "out false') self._vote = None return if self._vote == 'kick':", "self.debug('Error getting gametype. Response is %s' % gametype) return False", "= None if len(sp) == 2: cmd, alias = sp", "# Get further info for proper processing if self._vote ==", "to ^2$s^7?\", 'roundlimit': \"Round limit vote in progress. 
Change round", "a = self._value if a == 'maprestart' or a ==", "['ctf', 'sd', 're', 'bas', 'dom'] gt = self.getGameType() if not", "for BigBrotherBot(B3) (www.bigbrotherbot.net) # Copyright (C) 2015 ph03n1x # #", "rcon for killcam: scr_game_allowkillcam - 0 or 1 setting =", "all=True): if client.maxLevel >= 20: return True elif cmd.time +", "'maprotate': if self._mapRequested: self.confirmMap() else: self.console.rotateMap() elif self._vote == 'maprestart':", "vote passed _default_messages = { 'tovote': '^7Use ^2!yes ^7or ^2!no", "This wil broadcast vote message to server. a = self._value", "map is being voted for _kickRequested = None # Stores", "= self._value if self._value == 'on': setting = 1 elif", "getCmd(self, cmd): cmd = 'cmd_%s' % cmd if hasattr(self, cmd):", "vote allowed maps. Imported from \"votemaps\" section in config _amt_yes", "self._vote = data[0] self._value = data[0] elif len(data) == 2:", "any later version. # # This program is distributed in", "Fifth Floor, Boston, MA 02110-1301 USA # # Changelog: #", "maplist.iteritems(): if partial in mapname: a.append(mapname) elif partial in consolename:", "present at vote call can vote playersInGame = 0 self._allplayers", "return # Check if the player already voted if client", "if self._vote == 'map': self.confirmMap() elif self._vote == 'nextmap': self.debug('nextmap", "voting: %s' % self._aMaps) # Load votes section if self.config.has_section('votes'):", "# Check if majority of players voted vNo = len(self._amt_no)", "self.console.rotateMap() def cmd_veto(self, data, client, cmd=None): \"\"\"\\ Cancel a vote", "consolename self.debug('Successfully entered maps for voting: %s' % self._aMaps) #", "map to ^3$s^7?\", 'kick': \"Kick vote in progress: Kick ^2$s^7?\",", "except: self.debug('Unable to get [votetime] from settings. 
Using default: %s'", "# This will toggle friendly fire on and off setting", "def confirmVote(self): self.console.say('^3Vote passed!^7') if self._vote == 'map': self.confirmMap() elif", "= None self._value = None self._kickRequested = None return #", "> vPass: self.denyVote() def cmd_nextmap(self, data, client=None, cmd=None): \"\"\"\\ -", "setting = 1 elif self._value == 'off': setting = 0", "^2!no ^7 to vote', 'map': \"Map vote in progress: Change", "vote for maps. mapvote turned out false') self._vote = None", "are: %s' % self._aVotes) def getCmd(self, cmd): cmd = 'cmd_%s'", "try: setting = int(setting) except ValueError: self.debug('ERROR: Could not set", "on if self._vote == 'maps': v1 = self.checkIfAllowed(client, 'map') v2", "called into vote. # - Fixed issue where person who", "{'s': a} self.console.say(self.getMessage(self._vote, param)) self.console.say(self.getMessage('tovote')) def aquireCmdLock2(self, cmd, client, delay,", "gt + '_timelimit' self.console.setCvar(cparams, setting) def confirmRoundLength(self): setting = self._value", "if player has permission to call vote type v =", "get map list') def cmd_maprotate(self, data, client, cmd=None): \"\"\"\\ Cycle", "Handle EVT_GAME_ROUND_END \"\"\" if self._mapRequested: self.confirmMap() self._mapRequested = None ###############", "more details. # # You should have received a copy", "self.console.say(self.getMessage('tovote')) def aquireCmdLock2(self, cmd, client, delay, all=True): if client.maxLevel >=", "'kick': \"Kick vote in progress: Kick ^2$s^7?\", 'maprotate': \"Rotate map", "if gametype: return gametype else: self.debug('Error getting gametype. 
Response is", "Note - to kick someone we need: client.kick(reason, keyword, admin,", "be useful, # but WITHOUT ANY WARRANTY; without even the", "s.kick('Voted against', '', None, True, '') self._kickRequested = None def", "_default_messages = { 'tovote': '^7Use ^2!yes ^7or ^2!no ^7 to", "self._vote = None return if self._vote == 'kick': self._kickRequested =", "<= self.console.time(): return True else: return False def checkIfAllowed(self, client,", "alias = sp func = self.getCmd(cmd) if func: self.adminPlugin.registerCommand(self, cmd,", "score limit to ^2$s^7?\", 'timelimit': \"Timelimit vote in progress. Change", "VOTES ###################### def confirmVote(self): self.console.say('^3Vote passed!^7') if self._vote == 'map':", "get the person to kick') self._vote = None self._value =", "1 self.console.setCvar('scr_team_fftype', int(setting)) def confirmKillCam(self): # rcon for killcam: scr_game_allowkillcam", "Added \"!vote maps\" to show what maps can be called", "failed!') self._vote = None self._value = None self._amt_no = []", "cmd=None): \"\"\"\\ Vote yes to the vote in progress \"\"\"", "_amt_yes = [] # Amount of players who voted yes.", "limit to ^2$s^7?\", } def onStartup(self): self.adminPlugin = self.console.getPlugin('admin') if", "= None self._value = None self._amt_no = [] self._amt_yes =", "'maps': v1 = self.checkIfAllowed(client, 'map') v2 = self.checkIfAllowed(client, 'nextmap') if", "elif partial in consolename: a.append(mapname) return a def mapvote(self, client,", "in progress: Change map to ^3$s^7?\", 'nextmap': \"Next map vote", "ValueError: self.debug('Could not set new round limit. Voted value is", "vote', 'map': \"Map vote in progress: Change map to ^3$s^7?\",", "return elif client not in self._amt_yes or client not in", "self._amt_yes.insert(0, client) # Let player know that vote is registered", "vote remaining in progress if requirements for vote unmet. 
#", "'ph03n1x' import b3, threading import b3.plugin import b3.events class CodvotePlugin(b3.plugin.Plugin):", "setting for Friendlyfire. Toggling to next mode') now = self.console.getCvar('scr_team_fftype').getInt()", "HANDLING ############################## def _search(self, maplist, partial): a = [] for", "return # Check if the player already voted. If not,", "from settings. Using default: %s' % self._votetime) # Load votemaps", "if self._mapRequested: self.confirmMap() self._mapRequested = None ############### CONFIRM VOTES ######################", "= self.console.getCvar('g_gametype').getString() if gametype: return gametype else: self.debug('Error getting gametype.", "in progress. Turn killcam ^2$s^7?\", 'scorelimit': \"Scorelimit vote in progress.", "permission to call this vote') self._vote = None return #", "# Find if map is in allowed list match =", "# the Free Software Foundation; either version 2 of the", "next map when needed. self.console.write('map %s' % self._aMaps[self._mapRequested]) self._mapRequested =", "Change round length to ^2$s^7?\", 'roundlimit': \"Round limit vote in", "allowed to vote if client not in self._allplayers: client.message('Sorry, you", "= self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting being voted for is not valid. 
Toggling", "== 'map': self.confirmMap() elif self._vote == 'nextmap': self.debug('nextmap vote passed.", "%s' % self._votetime) # Load votemaps section if self.config.has_section('votemaps'): for", "1: match = (', ').join(match) client.message('^1ABORTED!^7Multiple matches: %s' % match)", "# ################################################################################# def cmd_vote(self, data, client, cmd=None): \"\"\"\\ !vote <setting>", "True elif len(match) > 1: match = (', ').join(match) client.message('^1ABORTED!^7Multiple", "self.confirmTimeLimit() elif self._vote == 'roundlength': self.confirmRoundLength() elif self._vote == 'roundlimit':", "client.maxLevel >= self._aVotes[voteType]: return True else: return False ################################################################################# #", "'scorelimit': \"Scorelimit vote in progress. Change score limit to ^2$s^7?\",", "return # Check if we have enough data for vote", "self.registerEvent('EVT_GAME_EXIT', self.onGameEnd) def onLoadConfig(self): # Load settings section try: self._votetime", "def confirmRoundLimit(self): setting = self._value amodes = ['ctf', 'sd', 're',", "'re', 'bas', 'dom'] gt = self.getGameType() if not isinstance(setting, int):", "isinstance(setting, int): if self._value == 'on': setting = 1 elif", "% self._aMaps) # Load votes section if self.config.has_section('votes'): adLvl =", "default: %s' % self._votetime) # Load votemaps section if self.config.has_section('votemaps'):", "License # along with this program; if not, write to", "off setting = self._value if not isinstance(setting, int): if self._value", "= None # Stores which vote is currently in progress", "'maprotate', 20, self.cmd_maprotate, None) self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes, None) #", "to call this vote') self._vote = None return # Get", "if vYes > vPass: self.confirmVote() def cmd_no(self, data, client=None, cmd=None):", "t1 = threading.Timer((self._votetime - 5), 
self.voteMessage) t1.start() def voteMessage(self): if", "Voted value is not integer') return cparams = 'scr_' +", "(', ').join(p) client.message('Allowed votes are: %s' % x) elif len(allowed)", "t2.start() ######################### MAP HANDLING ############################## def _search(self, maplist, partial): a", "cvar on server. \"\"\" # Check if vote already in", "client.message('Vote type not allowed. Use ^2!allvotes ^7for available votes.') self._vote", "None self._kickRequested = None return # Seems like vote is", "try: setting = int(setting) except ValueError: now = self.console.getCvar('scr_game_allowkillcam').getInt() self.debug('Setting", ">= 1: setting = 0 elif now == 0: setting", "length to ^2$s^7?\", 'roundlimit': \"Round limit vote in progress. Change", "self.getGameType() if not isinstance(setting, int): try: setting = int(setting) except", "Type ^2!help vote ^7for info') return # Check if player", "= 1 self.console.setCvar('scr_team_fftype', int(setting)) def confirmKillCam(self): # rcon for killcam:", "client.message('^3Vote canceled') self.denyVote() elif not self._vote: client.message('^3No vote in progress')", "self._kickRequested = None return # Seems like vote is ok.", "the GNU General Public License as published by # the", "vote call can vote playersInGame = 0 self._allplayers = []", "self.debug('could not get the person to kick') self._vote = None", "else: client.message('^1ERROR^7: Invalid usage. Type ^2!help vote ^7for info') return", "in self._amt_no: self._amt_no.insert(0, client) # Let player know that vote", "mapname, consolename in maplist.iteritems(): if partial in mapname: a.append(mapname) elif", "'1.0.2' __author__ = 'ph03n1x' import b3, threading import b3.plugin import", "progress. 
Change next map to ^3$s^7?\", 'kick': \"Kick vote in", "def cmd_nextmap(self, data, client=None, cmd=None): \"\"\"\\ - list the next", "self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes, None) # Register events self.registerEvent('EVT_GAME_EXIT', self.onGameEnd)", "not integer') return cparams = 'scr_' + gt + '_timelimit'", "Could not set new scorelimit. Voted value is not integer')", "if vNo > vPass: self.denyVote() def cmd_nextmap(self, data, client=None, cmd=None):", "= [] # Amount of players in game _mapRequested =", "Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA", "threading.Timer(10, self.denyVote) t2.start() ######################### MAP HANDLING ############################## def _search(self, maplist,", "we have enough data for vote data = data.split() if", "def voteMessage(self): if self._vote: self.console.say('^110 seconds until vote end!') t2", "None ######################### VOTE TIMING ############################## def voteTimer(self): t1 = threading.Timer((self._votetime", "if now == 0: setting = 1 else: setting =", "setting or cvar on server. \"\"\" # Check if vote", "# All vote allowed maps. Imported from \"votemaps\" section in", "elif self._value == 'off': setting = 0 if not isinstance(setting,", "not integer') return if gt in amodes: cparams = 'scr_'", "voted already vYes = len(self._amt_yes) vPass = len(self._allplayers) / 2", "= self.config.get('commands', cmd) sp = cmd.split('-') alias = None if", "return False def sendBroadcast(self): # This wil broadcast vote message", "cmd.split('-') alias = None if len(sp) == 2: cmd, alias", "vote unmet. 
# v1.0.2 - Added \"!vote maps\" to show", "if not self._vote: client.message('No vote in progress') return # Check", "before a vote will be canceled for not passing _aVotes", "Change time limit to ^2$s^7?\", 'roundlength': \"Round length vote in", "data.split() if len(data) == 1 and data[0] == 'maprotate' or", "admin, silent=True/False, data) s = self._kickRequested self.debug('Kick vote passed. Kicking", "program is free software; you can redistribute it and/or modify", "be kicked if vote passed _default_messages = { 'tovote': '^7Use", "self.debug('Successfully entered maps for voting: %s' % self._aMaps) # Load", "votemaps section if self.config.has_section('votemaps'): for (mapname, consolename) in self.config.items('votemaps'): if", "Check if enough players in game to vote and store", "COMMANDS # ################################################################################# def cmd_vote(self, data, client, cmd=None): \"\"\"\\ !vote", "client=None, cmd=None): \"\"\"\\ Vote NO to the current vote \"\"\"", "else: self.debug('Could not set round limit as gametype do not", "self.confirmMaprestart() elif self._vote == 'friendlyfire': self.confirmFriendlyFire() elif self._vote == 'killcam':", "players voted already vYes = len(self._amt_yes) vPass = len(self._allplayers) /", "voted on if self._vote == 'maps': v1 = self.checkIfAllowed(client, 'map')", "else: self.debug('could not get the person to kick') self._vote =", "Invalid usage. 
Type ^2!help vote ^7for info') return # Check", "cmd_allvotes(self, data, client, cmd=None): \"\"\"\\ Show all the votes you", "server self.sendBroadcast() # Start timer self.voteTimer() # Set person who", "self._vote == 'kick': self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client) if self._kickRequested: if", "not in self._amt_no: self._amt_yes.insert(0, client) # Let player know that", "(C) 2015 ph03n1x # # This program is free software;", "mapvote turned out false') self._vote = None return if self._vote", "\"votes\" section in config _aMaps = {} # All vote", "= 0 else: self.debug('Unknown wanted setting for Friendlyfire. Toggling to", "t2 = threading.Timer(10, self.denyVote) t2.start() ######################### MAP HANDLING ############################## def", "a.append(mapname) elif partial in consolename: a.append(mapname) return a def mapvote(self,", "are not allowed to call any votes') def cmd_yes(self, data,", "for not passing _aVotes = {} # All votes allowed.", "or a == 'maprotate': self.console.say(self.getMessage(self._vote)) elif a != 'maprestart' and", "value except ValueError: self._aVotes[entry.lower()] = adLvl[value] self.debug('Allowed votes are: %s'", "restart the current map self.console.write('fast_restart') def confirmFriendlyFire(self): # This will", "in config _amt_yes = [] # Amount of players who", "of this plugin self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap, 'nm') self.adminPlugin.registerCommand(self, 'maprotate',", "to vote as well. Changed to automatic yes vote. 
__version__", "None def confirmMap(self): # This will cycle to next map", "param = {'s': a} self.console.say(self.getMessage(self._vote, param)) self.console.say(self.getMessage('tovote')) def aquireCmdLock2(self, cmd,", "Fixed vote remaining in progress if requirements for vote unmet.", "self._value if not isinstance(setting, int): if self._value == 'on': setting", "= 1 elif self._value == 'off': setting = 0 else:", "############### NEXTMAP FUNCTIONING ################ def onGameEnd(self, event): \"\"\" Handle EVT_GAME_ROUND_END", "client.message('^2You do not have permission to call map votes') self._vote", "self.confirmScoreLimit() elif self._vote == 'timelimit': self.confirmTimeLimit() elif self._vote == 'roundlength':", "is not valid. Toggling to next mode. Killcam currently: %s'", "self._aVotes: client.message('Vote type not allowed. Use ^2!allvotes ^7for available votes.')", "True else: return False def checkIfAllowed(self, client, voteType): if client.maxLevel", "needed to vote as well. Changed to automatic yes vote.", "# Start timer self.voteTimer() # Set person who called vote", "has been entered') # Check if majority of players voted", "[] # Amount of players in game _mapRequested = None", "if the player already voted. 
If not, register vote if", "# This will restart the current map self.console.write('fast_restart') def confirmFriendlyFire(self):", "return a def mapvote(self, client, wantedMap): # Find if map", "_amt_no = [] _allplayers = [] # Amount of players", "= None # Stores which map is being voted for", "TIMING ############################## def voteTimer(self): t1 = threading.Timer((self._votetime - 5), self.voteMessage)", "cmd = 'cmd_%s' % cmd if hasattr(self, cmd): func =", "- Fixed issue where person who called vote needed to", "int(value) self._aVotes[entry.lower()] = value except ValueError: self._aVotes[entry.lower()] = adLvl[value] self.debug('Allowed", "if self._vote: client.message('^3Vote canceled') self.denyVote() elif not self._vote: client.message('^3No vote", "= self.console.getCvar('scr_team_fftype').getInt() if now >= 1: setting = 0 elif", "self._vote = None return # Check if player has permission", "else: return False ################################################################################# # COMMANDS # ################################################################################# def cmd_vote(self,", "(len(self._allplayers) / 2): self.confirmVote() def cmd_allvotes(self, data, client, cmd=None): \"\"\"\\", "partial): a = [] for mapname, consolename in maplist.iteritems(): if", "self.adminPlugin: self.error('Could not find admin plugin') return # Register commands", "you drunk? You already voted!') return elif client not in", "setting = self._value amodes = ['ctf', 'sd', 're', 'bas', 'dom']", "maps for voting: %s' % self._aMaps) # Load votes section", "vote in progress. 
Change next map to ^3$s^7?\", 'kick': \"Kick", "== 'scorelimit': self.confirmScoreLimit() elif self._vote == 'timelimit': self.confirmTimeLimit() elif self._vote", "if len(match) == 1: self._mapRequested = match[0] self._value = match[0]", "cparams = 'scr_' + gt + '_timelimit' self.console.setCvar(cparams, setting) def", "func = self.getCmd(cmd) if func: self.adminPlugin.registerCommand(self, cmd, level, func, alias)", "in progress. Change round length to ^2$s^7?\", 'roundlimit': \"Round limit", "self.cmd_allvotes, None) # Register events self.registerEvent('EVT_GAME_EXIT', self.onGameEnd) def onLoadConfig(self): #", "progress if requirements for vote unmet. # v1.0.2 - Added", "section in config _amt_yes = [] # Amount of players", "false') self._vote = None return if self._vote == 'kick': self._kickRequested", "to vote', 'map': \"Map vote in progress: Change map to", "NEXTMAP FUNCTIONING ################ def onGameEnd(self, event): \"\"\" Handle EVT_GAME_ROUND_END \"\"\"", "len(self._amt_no) vPass = len(self._allplayers) / 2 if vNo > vPass:", "Check if there is a vote in progress if not", "False ################################################################################# # COMMANDS # ################################################################################# def cmd_vote(self, data, client,", "self.console.getNextMap() if mapname: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % mapname) else:", "not integer') return cparams = 'scr_' + gt + '_scorelimit'", "%s' % s.name) s.kick('Voted against', '', None, True, '') self._kickRequested", "self._vote: client.message('No vote in progress') return # Check if player", "client in self._amt_yes or client in self._amt_no: client.message('Are you drunk?", "1 and data[0] == 'maprotate' or len(data) == 1 and", "'nextmap', 1, self.cmd_nextmap, 'nm') self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate, None) self.adminPlugin.registerCommand(self,", "vote in progress') return # Check if player is 
allowed", "# Set person who called vote as yes vote self._amt_yes.insert(0,", "elif self._vote == 'roundlength': self.confirmRoundLength() elif self._vote == 'roundlimit': self.confirmRoundLimit()", "False def sendBroadcast(self): # This wil broadcast vote message to", "func, alias) # Re-deploy commands for consideration of this plugin", "is in allowed list match = self._search(self._aMaps, wantedMap) if len(match)", "= None self._kickRequested = None return # Seems like vote", "self.config.items('votemaps'): if mapname: self._aMaps[mapname] = consolename self.debug('Successfully entered maps for", "players. Only players present at vote call can vote playersInGame", "data[0] self._value = data[0] elif len(data) == 2: type =", "of the GNU General Public License # along with this", "value = int(value) self._aVotes[entry.lower()] = value except ValueError: self._aVotes[entry.lower()] =", "confirmRoundLength(self): setting = self._value amodes = ['ctf', 'sd', 're', 'bas',", "def onStartup(self): self.adminPlugin = self.console.getPlugin('admin') if not self.adminPlugin: self.error('Could not", "self.console.say(self.getMessage(self._vote, param)) self.console.say(self.getMessage('tovote')) def aquireCmdLock2(self, cmd, client, delay, all=True): if", "self._value if self._value == 'on': setting = 1 elif self._value", "return False def checkIfAllowed(self, client, voteType): if client.maxLevel >= self._aVotes[voteType]:", "are: %s' % x) elif len(allowed) == 0: client.message('You are", "is registered client.message('^3Your vote has been entered') # Check if", "self.sendBroadcast() # Start timer self.voteTimer() # Set person who called", "Check if type of vote is allowed if self._vote not", "do not have permission to call this vote') self._vote =", "in progress \"\"\" if self._vote: client.message('^3Vote canceled') self.denyVote() elif not", "len(allowed) == 0: client.message('You are not allowed to call any", "client.message('^1ABORTED^7: Cannot vote to kick admin!') 
self._vote = None self._value", "elif len(allowed) == 0: client.message('You are not allowed to call", "commands for consideration of this plugin self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap,", "map self.console.write('fast_restart') def confirmFriendlyFire(self): # This will toggle friendly fire", "Imported from \"votes\" section in config _aMaps = {} #", "value is not integer') return cparams = 'scr_' + gt", "\"\"\" Handle EVT_GAME_ROUND_END \"\"\" if self._mapRequested: self.confirmMap() self._mapRequested = None", "= data[0] self._value = data[0] elif len(data) == 2: type", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "vote and store present players. Only players present at vote", "self._vote == 'map': self.confirmMap() elif self._vote == 'nextmap': self.debug('nextmap vote", "def confirmKick(self): # Note - to kick someone we need:", "^2$s^7?\", } def onStartup(self): self.adminPlugin = self.console.getPlugin('admin') if not self.adminPlugin:", "\"Friendlyfire vote in progress. Change friendlyfire mode to ^2$s^7?\", 'killcam':", "toggle friendly fire on and off setting = self._value if", "mapname: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % mapname) else: client.message('^1Error:^7 could", "the terms of the GNU General Public License as published", "Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor,", "checkIfAllowed(self, client, voteType): if client.maxLevel >= self._aVotes[voteType]: return True else:", "= [] for c in self.console.clients.getList(): if c.team != b3.TEAM_SPEC:", "vote if client in self._amt_yes or client in self._amt_no: client.message('Are", "permission to call map votes') self._vote = None self._value =", "progress. Turn killcam ^2$s^7?\", 'scorelimit': \"Scorelimit vote in progress. 
Change", "return True elif cmd.time + 5 <= self.console.time(): return True", "votes section if self.config.has_section('votes'): adLvl = {'guest': 0, 'user': 1,", "confirmKick(self): # Note - to kick someone we need: client.kick(reason,", "vote passed. Params already stored') elif self._vote == 'kick': self.confirmKick()", "self._kickRequested self.debug('Kick vote passed. Kicking %s' % s.name) s.kick('Voted against',", "Check if player is allowed to vote if client not", "vNo = len(self._amt_no) vPass = len(self._allplayers) / 2 if vNo", "def voteTimer(self): t1 = threading.Timer((self._votetime - 5), self.voteMessage) t1.start() def", "def denyVote(self): if self._vote: self.console.say('^3Vote failed!') self._vote = None self._value", "game to vote.') self._vote = None return # Check if", "already in progress if self._vote: client.message('^1ERROR^7: Vote already in progress')", "% (('^7, ^2').join(self._aMaps.keys()))) self._vote = None self._value = None return", "def getGameType(self): gametype = self.console.getCvar('g_gametype').getString() if gametype: return gametype else:", "entered') # Check if majority of players voted vNo =", "Turn killcam ^2$s^7?\", 'scorelimit': \"Scorelimit vote in progress. Change score", "# Let player know that vote is registered client.message('^3Your vote", "round length to ^2$s^7?\", 'roundlimit': \"Round limit vote in progress.", "'scorelimit': self.confirmScoreLimit() elif self._vote == 'timelimit': self.confirmTimeLimit() elif self._vote ==", "self._aVotes.keys(): if client.maxLevel >= self._aVotes[k]: allowed.insert(0, k) if len(allowed) >", "Floor, Boston, MA 02110-1301 USA # # Changelog: # v1.0.1", "self.debug('Vote aborted: Cannot vote for maps. mapvote turned out false')", "= 1 elif self._value == 'off': setting = 0 if", "client.message('^1Error:^7 could not get map list') def cmd_maprotate(self, data, client,", "\"\"\"\\ Cancel a vote in progress \"\"\" if self._vote: client.message('^3Vote", "automatic yes vote. 
__version__ = '1.0.2' __author__ = 'ph03n1x' import", "80, 'superadmin': 100} for (entry, value) in self.config.items('votes'): try: value", "= 0 if not isinstance(setting, int): try: setting = int(setting)", "the GNU General Public License # along with this program;", "== 2: type = data[0] value = data[1] self._vote =", "\"\"\"\\ Show all the votes you are allowed to call", "\"Timelimit vote in progress. Change time limit to ^2$s^7?\", 'roundlength':", "== 'maprotate': if self._mapRequested: self.confirmMap() else: self.console.rotateMap() elif self._vote ==", "!vote <setting> <value> - vote to change setting or cvar", "= getattr(self, cmd) return func return None ######################### VOTE TIMING", "or len(data) == 1 and data[0] == 'maprestart' or len(data)", "self.voteTimer() # Set person who called vote as yes vote", "progress. Change friendlyfire mode to ^2$s^7?\", 'killcam': \"Killcam vote in", "alias = None if len(sp) == 2: cmd, alias =", "self._aVotes[entry.lower()] = value except ValueError: self._aVotes[entry.lower()] = adLvl[value] self.debug('Allowed votes", "next mode') now = self.console.getCvar('scr_team_fftype').getInt() if now >= 1: setting", "%s' % self._aMaps[self._mapRequested]) self._mapRequested = None def confirmMaprestart(self): # This", "k) if len(allowed) > 0: p = sorted(allowed) x =", "2): self.confirmVote() def cmd_allvotes(self, data, client, cmd=None): \"\"\"\\ Show all", "client=None, cmd=None): \"\"\"\\ - list the next map in rotation", "def confirmTimeLimit(self): setting = self._value gt = self.getGameType() if not", "# along with this program; if not, write to the", "for mapname, consolename in maplist.iteritems(): if partial in mapname: a.append(mapname)", "% self._aVotes) def getCmd(self, cmd): cmd = 'cmd_%s' % cmd", "client.message('^1ERROR^7: Invalid usage. 
Type ^2!help vote ^7for info') return #", "a vote in progress \"\"\" if self._vote: client.message('^3Vote canceled') self.denyVote()", "return # Check if player is asking what maps can", "vote self._amt_yes.insert(0, client) if len(self._amt_yes) > (len(self._allplayers) / 2): self.confirmVote()", "'maprestart' or len(data) == 1 and data[0] == 'maps': self._vote", "{'guest': 0, 'user': 1, 'reg': 2, 'mod': 20, 'admin': 40,", "vote _votetime = 30 # Time before a vote will", "voteType): if client.maxLevel >= self._aVotes[voteType]: return True else: return False", "self.confirmRoundLength() elif self._vote == 'roundlimit': self.confirmRoundLimit() else: self.error('Unable to commit.", "self._value = None return else: client.message('^2You do not have permission", "BigBrotherBot(B3) (www.bigbrotherbot.net) # Copyright (C) 2015 ph03n1x # # This", "commands') return if self._mapRequested: cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % self._mapRequested.title())", "set new scorelimit. Voted value is not integer') return cparams", "100} for (entry, value) in self.config.items('votes'): try: value = int(value)", "fire on and off setting = self._value if not isinstance(setting,", "if c.team != b3.TEAM_SPEC: playersInGame += 1 self._allplayers.insert(0, c) if", "############################## def voteTimer(self): t1 = threading.Timer((self._votetime - 5), self.voteMessage) t1.start()", "len(self._allplayers) / 2 if vYes > vPass: self.confirmVote() def cmd_no(self,", "progress: Change map to ^3$s^7?\", 'nextmap': \"Next map vote in", "in game to vote and store present players. 
Only players", "else: client.message('^2You do not have permission to call map votes')", "with this program; if not, write to the Free Software", "self._value = None self._amt_no = [] self._amt_yes = [] self._allplayers", "call map votes') self._vote = None self._value = None return", "# # You should have received a copy of the", "adLvl[value] self.debug('Allowed votes are: %s' % self._aVotes) def getCmd(self, cmd):", "True else: return False ################################################################################# # COMMANDS # ################################################################################# def", "is not integer') return cparams = 'scr_' + gt +", "'nm') self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate, None) self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes,", "setting = 1 else: setting = 0 self.console.setCvar('scr_game_allowkillcam', int(setting)) def", "without even the implied warranty of # MERCHANTABILITY or FITNESS" ]
[ "v.value() for k, v in var_dict.items()} def get_base_name(var): return var.name.split(\"/\")[-1].split(\":\")[0]", "__future__ import print_function import tensorflow as tf TRAIN_NAME = \"Train\"", "2.0 (the \"License\"); # you may not use this file", "need to hold multiple values, this happens # when updating", "get_model_name_scope(var_scope): return \"/\".join((var_scope, \"Model\")) def get_update_name_scope(var_scope): return \"/\".join((var_scope, \"Update\")) def", "TRAIN_NAME)) def get_validation_name_scope(var_scope): return \"/\".join((var_scope, VALIDATION_NAME)) def get_test_name_scope(var_scope): return \"/\".join((var_scope,", "to feed in the new values. # The placeholder may", "placeholders. For each var, it generates a placeholder to feed", "def generate_update_ops(vars_): \"\"\"Generates update ops and placeholders. For each var,", "= \"personal\" def get_train_name_scope(var_scope): return \"/\".join((var_scope, TRAIN_NAME)) def get_validation_name_scope(var_scope): return", "dict_update_placeholders = {} for v in vars_: # For every", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "= \"Validation\" TEST_NAME = \"Test\" LOSS_NAME = \"loss\" LOSS_SUMMARY_NAME =", "def get_update_placeholder_name(var): var_base_name = get_base_name(var) placeholder_name = \"placeholder_%s\" % var_base_name", "get_test_name_scope(var_scope): return \"/\".join((var_scope, TEST_NAME)) def get_model_name_scope(var_scope): return \"/\".join((var_scope, \"Model\")) def", "= get_base_name(var) placeholder_name = \"placeholder_%s\" % var_base_name return placeholder_name def", "of attributeds of a class instance.\"\"\" # first start by", "Then it takes the mean of the inputs along dimension", "dict_update_placeholders: A dict of var base name to its update-placeholder.", "new values. 
# The placeholder may need to hold multiple", "var, it generates a placeholder to feed in the new", "return var_dict def get_var_value_ops(var_dict): return {k: v.value() for k, v", "for k, v in var_dict.items()} def get_base_name(var): return var.name.split(\"/\")[-1].split(\":\")[0] def", "\"/\".join((var_scope, VALIDATION_NAME)) def get_test_name_scope(var_scope): return \"/\".join((var_scope, TEST_NAME)) def get_model_name_scope(var_scope): return", "VARS_TYPE_PERSONAL = \"personal\" def get_train_name_scope(var_scope): return \"/\".join((var_scope, TRAIN_NAME)) def get_validation_name_scope(var_scope):", "'w') to the variable.\"\"\" var_dict = {} for v in", "for manipulating variables in Federated personalization.\"\"\" from __future__ import absolute_import", "ops. dict_update_placeholders: A dict of var base name to its", "use this file except in compliance with the License. #", "import tensorflow as tf TRAIN_NAME = \"Train\" VALIDATION_NAME = \"Validation\"", "tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name) var_in_mean = tf.reduce_mean(var_in, 0) update_op = v.assign(var_in_mean)", "def get_var_dict(vars_): \"\"\"Gets a dict of var base_name (e.g. 'w')", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "for which the update ops will be generated. Returns: update_ops:", "License. # You may obtain a copy of the License", "a placeholder to feed in the new values. 
Then it", "var_dict = {} for v in vars_: var_base_name = get_base_name(v)", "y in class_instance.__class__.__dict__.items() if x[:2] != \"__\") # then update", "class_instance.__class__.__dict__.items() if x[:2] != \"__\") # then update the class", "under the License is distributed on an \"AS IS\" BASIS,", "update_ops.append(update_op) dict_update_placeholders[get_base_name(v)] = var_in return update_ops, dict_update_placeholders def print_vars_on_clients(clients, sess):", "License for the specific language governing permissions and # limitations", "each var, it generates a placeholder to feed in the", "a placeholder to feed in the new values. # The", "type. VARS_TYPE_ALL = \"all\" VARS_TYPE_SHARED = \"shared\" VARS_TYPE_PERSONAL = \"personal\"", "add_prefix(prefix, name): \"\"\"Adds prefix to name.\"\"\" return \"/\".join((prefix, name)) def", "var_base_name = get_base_name(var) placeholder_name = \"placeholder_%s\" % var_base_name return placeholder_name", "= var_in return update_ops, dict_update_placeholders def print_vars_on_clients(clients, sess): for c", "variables in Federated personalization.\"\"\" from __future__ import absolute_import from __future__", "Vars type. VARS_TYPE_ALL = \"all\" VARS_TYPE_SHARED = \"shared\" VARS_TYPE_PERSONAL =", "For every var in the scope, add a placeholder to", "\"/\".join((var_scope, TRAIN_NAME)) def get_validation_name_scope(var_scope): return \"/\".join((var_scope, VALIDATION_NAME)) def get_test_name_scope(var_scope): return", "from __future__ import absolute_import from __future__ import division from __future__", "in compliance with the License. 
# You may obtain a", "name)) def add_suffix(suffix, name): \"\"\"Adds subfix to name.\"\"\" return \"/\".join((name,", "software # distributed under the License is distributed on an", "def get_train_name_scope(var_scope): return \"/\".join((var_scope, TRAIN_NAME)) def get_validation_name_scope(var_scope): return \"/\".join((var_scope, VALIDATION_NAME))", "= \"placeholder_%s\" % var_base_name return placeholder_name def generate_update_ops(vars_): \"\"\"Generates update", "list of update ops. dict_update_placeholders: A dict of var base", "manipulating variables in Federated personalization.\"\"\" from __future__ import absolute_import from", "LOSS_SUMMARY_NAME = \"perplexity\" # Vars type. VARS_TYPE_ALL = \"all\" VARS_TYPE_SHARED", "grabbing the Class items attribute_dict = dict((x, y) for x,", "name): \"\"\"Adds prefix to name.\"\"\" return \"/\".join((prefix, name)) def add_suffix(suffix,", "return update_ops, dict_update_placeholders def print_vars_on_clients(clients, sess): for c in clients.values():", "c in clients.values(): print(\"client %d:\" % c.id) print(sess.run(c.read_ops_all_vars)) def add_prefix(prefix,", "update-placeholder. \"\"\" update_ops = [] dict_update_placeholders = {} for v", "the new values. # The placeholder may need to hold", "__future__ import division from __future__ import print_function import tensorflow as", "var_base_name return placeholder_name def generate_update_ops(vars_): \"\"\"Generates update ops and placeholders.", "it takes the mean of the inputs along dimension 0.", "y) for x, y in class_instance.__class__.__dict__.items() if x[:2] != \"__\")", "= [None] + v.shape.as_list() var_in_name = get_update_placeholder_name(v) var_in = tf.placeholder(v.dtype,", "return var.name.split(\"/\")[-1].split(\":\")[0] def get_update_name(var, var_scope): var_base_name = get_base_name(var) var_update_name =", "class instance.\"\"\" # first start by grabbing the Class items", "add a placeholder to feed in the new values. 
#", "= [] dict_update_placeholders = {} for v in vars_: #", "\"perplexity\" # Vars type. VARS_TYPE_ALL = \"all\" VARS_TYPE_SHARED = \"shared\"", "\"/\".join((var_scope, TEST_NAME)) def get_model_name_scope(var_scope): return \"/\".join((var_scope, \"Model\")) def get_update_name_scope(var_scope): return", "in Federated personalization.\"\"\" from __future__ import absolute_import from __future__ import", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "the update ops will be generated. Returns: update_ops: A list", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "tf TRAIN_NAME = \"Train\" VALIDATION_NAME = \"Validation\" TEST_NAME = \"Test\"", "return \"/\".join((prefix, name)) def add_suffix(suffix, name): \"\"\"Adds subfix to name.\"\"\"", "in class_instance.__class__.__dict__.items() if x[:2] != \"__\") # then update the", "\"shared\" VARS_TYPE_PERSONAL = \"personal\" def get_train_name_scope(var_scope): return \"/\".join((var_scope, TRAIN_NAME)) def", "update ops will be generated. Returns: update_ops: A list of", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "{k: v.value() for k, v in var_dict.items()} def get_base_name(var): return", "to in writing, software # distributed under the License is", "\"\"\" update_ops = [] dict_update_placeholders = {} for v in", "# See the License for the specific language governing permissions", "get_update_name(var, var_scope): var_base_name = get_base_name(var) var_update_name = \"update_%s_%s\" % (var_scope,", "VARS_TYPE_ALL = \"all\" VARS_TYPE_SHARED = \"shared\" VARS_TYPE_PERSONAL = \"personal\" def", "% var_base_name return placeholder_name def generate_update_ops(vars_): \"\"\"Generates update ops and", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "c.id) print(sess.run(c.read_ops_all_vars)) def add_prefix(prefix, name): \"\"\"Adds prefix to name.\"\"\" return", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "by grabbing the Class items attribute_dict = dict((x, y) for", "with the License. # You may obtain a copy of", "in var_dict.items()} def get_base_name(var): return var.name.split(\"/\")[-1].split(\":\")[0] def get_update_name(var, var_scope): var_base_name", "values. 
Then it takes the mean of the inputs along", "The placeholder may need to hold multiple values, this happens", "\"/\".join((var_scope, \"Model\")) def get_update_name_scope(var_scope): return \"/\".join((var_scope, \"Update\")) def get_var_dict(vars_): \"\"\"Gets", "get_base_name(var) var_update_name = \"update_%s_%s\" % (var_scope, var_base_name) return var_update_name def", "return \"/\".join((name, suffix)) def get_attribute_dict(class_instance): \"\"\"Gets a dict of attributeds", "then update the class items with the instance items attribute_dict.update(class_instance.__dict__)", "{} for v in vars_: # For every var in", "\"/\".join((prefix, name)) def add_suffix(suffix, name): \"\"\"Adds subfix to name.\"\"\" return", "update_op = v.assign(var_in_mean) update_ops.append(update_op) dict_update_placeholders[get_base_name(v)] = var_in return update_ops, dict_update_placeholders", "= tf.reduce_mean(var_in, 0) update_op = v.assign(var_in_mean) update_ops.append(update_op) dict_update_placeholders[get_base_name(v)] = var_in", "var_update_name def get_update_placeholder_name(var): var_base_name = get_base_name(var) placeholder_name = \"placeholder_%s\" %", "every var in the scope, add a placeholder to feed", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "var_base_name = get_base_name(var) var_update_name = \"update_%s_%s\" % (var_scope, var_base_name) return", "vars_: var_base_name = get_base_name(v) var_dict[var_base_name] = v return var_dict def", "distributed under the License is distributed on an \"AS IS\"", "var_in = tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name) var_in_mean = tf.reduce_mean(var_in, 0) update_op", "generated. Returns: update_ops: A list of update ops. dict_update_placeholders: A", "shape=var_in_shape, name=var_in_name) var_in_mean = tf.reduce_mean(var_in, 0) update_op = v.assign(var_in_mean) update_ops.append(update_op)", "express or implied. 
# See the License for the specific", "along dimension 0. Args: vars_: Vars for which the update", "except in compliance with the License. # You may obtain", "it generates a placeholder to feed in the new values.", "# The placeholder may need to hold multiple values, this", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "writing, software # distributed under the License is distributed on", "mean of the inputs along dimension 0. Args: vars_: Vars", "governing permissions and # limitations under the License. \"\"\"Utility functions", "in the scope, add a placeholder to feed in the", "you may not use this file except in compliance with", "import print_function import tensorflow as tf TRAIN_NAME = \"Train\" VALIDATION_NAME", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "to the variable.\"\"\" var_dict = {} for v in vars_:", "placeholder to feed in the new values. # The placeholder", "dict_update_placeholders[get_base_name(v)] = var_in return update_ops, dict_update_placeholders def print_vars_on_clients(clients, sess): for", "a class instance.\"\"\" # first start by grabbing the Class", "CONDITIONS OF ANY KIND, either express or implied. # See", "dimension 0. Args: vars_: Vars for which the update ops", "x, y in class_instance.__class__.__dict__.items() if x[:2] != \"__\") # then", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "tensorflow as tf TRAIN_NAME = \"Train\" VALIDATION_NAME = \"Validation\" TEST_NAME", "print_vars_on_clients(clients, sess): for c in clients.values(): print(\"client %d:\" % c.id)", "the new values. 
Then it takes the mean of the", "the Class items attribute_dict = dict((x, y) for x, y", "\"Validation\" TEST_NAME = \"Test\" LOSS_NAME = \"loss\" LOSS_SUMMARY_NAME = \"perplexity\"", "print(sess.run(c.read_ops_all_vars)) def add_prefix(prefix, name): \"\"\"Adds prefix to name.\"\"\" return \"/\".join((prefix,", "limitations under the License. \"\"\"Utility functions for manipulating variables in", "the mean of the inputs along dimension 0. Args: vars_:", "Vars for which the update ops will be generated. Returns:", "variable.\"\"\" var_dict = {} for v in vars_: var_base_name =", "from many clients. var_in_shape = [None] + v.shape.as_list() var_in_name =", "subfix to name.\"\"\" return \"/\".join((name, suffix)) def get_attribute_dict(class_instance): \"\"\"Gets a", "def get_update_name(var, var_scope): var_base_name = get_base_name(var) var_update_name = \"update_%s_%s\" %", "vars_: Vars for which the update ops will be generated.", "OR CONDITIONS OF ANY KIND, either express or implied. #", "TRAIN_NAME = \"Train\" VALIDATION_NAME = \"Validation\" TEST_NAME = \"Test\" LOSS_NAME", "= get_base_name(var) var_update_name = \"update_%s_%s\" % (var_scope, var_base_name) return var_update_name", "the License is distributed on an \"AS IS\" BASIS, #", "= \"Test\" LOSS_NAME = \"loss\" LOSS_SUMMARY_NAME = \"perplexity\" # Vars", "def get_validation_name_scope(var_scope): return \"/\".join((var_scope, VALIDATION_NAME)) def get_test_name_scope(var_scope): return \"/\".join((var_scope, TEST_NAME))", "the class items with the instance items attribute_dict.update(class_instance.__dict__) return attribute_dict", "values. 
# The placeholder may need to hold multiple values,", "dict_update_placeholders def print_vars_on_clients(clients, sess): for c in clients.values(): print(\"client %d:\"", "print(\"client %d:\" % c.id) print(sess.run(c.read_ops_all_vars)) def add_prefix(prefix, name): \"\"\"Adds prefix", "\"/\".join((var_scope, \"Update\")) def get_var_dict(vars_): \"\"\"Gets a dict of var base_name", "def get_base_name(var): return var.name.split(\"/\")[-1].split(\":\")[0] def get_update_name(var, var_scope): var_base_name = get_base_name(var)", "k, v in var_dict.items()} def get_base_name(var): return var.name.split(\"/\")[-1].split(\":\")[0] def get_update_name(var,", "of a class instance.\"\"\" # first start by grabbing the", "hold multiple values, this happens # when updating the server", "generate_update_ops(vars_): \"\"\"Generates update ops and placeholders. For each var, it", "law or agreed to in writing, software # distributed under", "(var_scope, var_base_name) return var_update_name def get_update_placeholder_name(var): var_base_name = get_base_name(var) placeholder_name", "the scope, add a placeholder to feed in the new", "TEST_NAME = \"Test\" LOSS_NAME = \"loss\" LOSS_SUMMARY_NAME = \"perplexity\" #", "var_dict def get_var_value_ops(var_dict): return {k: v.value() for k, v in", "%d:\" % c.id) print(sess.run(c.read_ops_all_vars)) def add_prefix(prefix, name): \"\"\"Adds prefix to", "Copyright 2018 Google Inc. # # Licensed under the Apache", "feed in the new values. # The placeholder may need", "v in var_dict.items()} def get_base_name(var): return var.name.split(\"/\")[-1].split(\":\")[0] def get_update_name(var, var_scope):", "the inputs along dimension 0. Args: vars_: Vars for which", "permissions and # limitations under the License. 
\"\"\"Utility functions for", "may obtain a copy of the License at # #", "var_base_name = get_base_name(v) var_dict[var_base_name] = v return var_dict def get_var_value_ops(var_dict):", "may need to hold multiple values, this happens # when", "\"loss\" LOSS_SUMMARY_NAME = \"perplexity\" # Vars type. VARS_TYPE_ALL = \"all\"", "+ v.shape.as_list() var_in_name = get_update_placeholder_name(v) var_in = tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name)", "name=var_in_name) var_in_mean = tf.reduce_mean(var_in, 0) update_op = v.assign(var_in_mean) update_ops.append(update_op) dict_update_placeholders[get_base_name(v)]", "def get_update_name_scope(var_scope): return \"/\".join((var_scope, \"Update\")) def get_var_dict(vars_): \"\"\"Gets a dict", "v in vars_: var_base_name = get_base_name(v) var_dict[var_base_name] = v return", "A dict of var base name to its update-placeholder. \"\"\"", "inputs along dimension 0. Args: vars_: Vars for which the", "the License. \"\"\"Utility functions for manipulating variables in Federated personalization.\"\"\"", "get_attribute_dict(class_instance): \"\"\"Gets a dict of attributeds of a class instance.\"\"\"", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. # # Licensed under the Apache License, Version 2.0", "get_base_name(var): return var.name.split(\"/\")[-1].split(\":\")[0] def get_update_name(var, var_scope): var_base_name = get_base_name(var) var_update_name", "update ops and placeholders. For each var, it generates a", "may not use this file except in compliance with the", "Args: vars_: Vars for which the update ops will be", "language governing permissions and # limitations under the License. \"\"\"Utility", "var_scope): var_base_name = get_base_name(var) var_update_name = \"update_%s_%s\" % (var_scope, var_base_name)", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "= {} for v in vars_: # For every var", "this file except in compliance with the License. 
# You", "to name.\"\"\" return \"/\".join((name, suffix)) def get_attribute_dict(class_instance): \"\"\"Gets a dict", "import absolute_import from __future__ import division from __future__ import print_function", "(e.g. 'w') to the variable.\"\"\" var_dict = {} for v", "Class items attribute_dict = dict((x, y) for x, y in", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "Returns: update_ops: A list of update ops. dict_update_placeholders: A dict", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "= \"shared\" VARS_TYPE_PERSONAL = \"personal\" def get_train_name_scope(var_scope): return \"/\".join((var_scope, TRAIN_NAME))", "in vars_: var_base_name = get_base_name(v) var_dict[var_base_name] = v return var_dict", "= \"all\" VARS_TYPE_SHARED = \"shared\" VARS_TYPE_PERSONAL = \"personal\" def get_train_name_scope(var_scope):", "under the License. \"\"\"Utility functions for manipulating variables in Federated", "[] dict_update_placeholders = {} for v in vars_: # For", "for c in clients.values(): print(\"client %d:\" % c.id) print(sess.run(c.read_ops_all_vars)) def", "attributeds of a class instance.\"\"\" # first start by grabbing", "def get_test_name_scope(var_scope): return \"/\".join((var_scope, TEST_NAME)) def get_model_name_scope(var_scope): return \"/\".join((var_scope, \"Model\"))", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "= get_update_placeholder_name(v) var_in = tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name) var_in_mean = tf.reduce_mean(var_in,", "for x, y in class_instance.__class__.__dict__.items() if x[:2] != \"__\") #", "to feed in the new values. Then it takes the", "var base name to its update-placeholder. \"\"\" update_ops = []", "updating the server from many clients. 
var_in_shape = [None] +", "def print_vars_on_clients(clients, sess): for c in clients.values(): print(\"client %d:\" %", "0. Args: vars_: Vars for which the update ops will", "<gh_stars>1-10 # Copyright 2018 Google Inc. # # Licensed under", "a dict of var base_name (e.g. 'w') to the variable.\"\"\"", "def get_attribute_dict(class_instance): \"\"\"Gets a dict of attributeds of a class", "in the new values. # The placeholder may need to", "update_ops = [] dict_update_placeholders = {} for v in vars_:", "clients. var_in_shape = [None] + v.shape.as_list() var_in_name = get_update_placeholder_name(v) var_in", "# first start by grabbing the Class items attribute_dict =", "return placeholder_name def generate_update_ops(vars_): \"\"\"Generates update ops and placeholders. For", "get_var_dict(vars_): \"\"\"Gets a dict of var base_name (e.g. 'w') to", "v return var_dict def get_var_value_ops(var_dict): return {k: v.value() for k,", "name): \"\"\"Adds subfix to name.\"\"\" return \"/\".join((name, suffix)) def get_attribute_dict(class_instance):", "[None] + v.shape.as_list() var_in_name = get_update_placeholder_name(v) var_in = tf.placeholder(v.dtype, shape=var_in_shape,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "\"personal\" def get_train_name_scope(var_scope): return \"/\".join((var_scope, TRAIN_NAME)) def get_validation_name_scope(var_scope): return \"/\".join((var_scope,", "0) update_op = v.assign(var_in_mean) update_ops.append(update_op) dict_update_placeholders[get_base_name(v)] = var_in return update_ops,", "clients.values(): print(\"client %d:\" % c.id) print(sess.run(c.read_ops_all_vars)) def add_prefix(prefix, name): \"\"\"Adds", "as tf TRAIN_NAME = \"Train\" VALIDATION_NAME = \"Validation\" TEST_NAME =", "\"update_%s_%s\" % (var_scope, var_base_name) return var_update_name def get_update_placeholder_name(var): var_base_name =", "values, this happens # when updating the server from many", "or implied. 
# See the License for the specific language", "var in the scope, add a placeholder to feed in", "def add_suffix(suffix, name): \"\"\"Adds subfix to name.\"\"\" return \"/\".join((name, suffix))", "placeholder_name = \"placeholder_%s\" % var_base_name return placeholder_name def generate_update_ops(vars_): \"\"\"Generates", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "var_dict.items()} def get_base_name(var): return var.name.split(\"/\")[-1].split(\":\")[0] def get_update_name(var, var_scope): var_base_name =", "= dict((x, y) for x, y in class_instance.__class__.__dict__.items() if x[:2]", "var.name.split(\"/\")[-1].split(\":\")[0] def get_update_name(var, var_scope): var_base_name = get_base_name(var) var_update_name = \"update_%s_%s\"", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "for v in vars_: var_base_name = get_base_name(v) var_dict[var_base_name] = v", "Google Inc. # # Licensed under the Apache License, Version", "v.assign(var_in_mean) update_ops.append(update_op) dict_update_placeholders[get_base_name(v)] = var_in return update_ops, dict_update_placeholders def print_vars_on_clients(clients,", "(the \"License\"); # you may not use this file except", "dict((x, y) for x, y in class_instance.__class__.__dict__.items() if x[:2] !=", "# you may not use this file except in compliance", "= \"perplexity\" # Vars type. VARS_TYPE_ALL = \"all\" VARS_TYPE_SHARED =", "var_in_mean = tf.reduce_mean(var_in, 0) update_op = v.assign(var_in_mean) update_ops.append(update_op) dict_update_placeholders[get_base_name(v)] =", "new values. Then it takes the mean of the inputs", "of var base_name (e.g. 'w') to the variable.\"\"\" var_dict =", "to its update-placeholder. \"\"\" update_ops = [] dict_update_placeholders = {}", "# Vars type. 
VARS_TYPE_ALL = \"all\" VARS_TYPE_SHARED = \"shared\" VARS_TYPE_PERSONAL", "the variable.\"\"\" var_dict = {} for v in vars_: var_base_name", "# # Unless required by applicable law or agreed to", "print_function import tensorflow as tf TRAIN_NAME = \"Train\" VALIDATION_NAME =", "multiple values, this happens # when updating the server from", "name.\"\"\" return \"/\".join((name, suffix)) def get_attribute_dict(class_instance): \"\"\"Gets a dict of", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "which the update ops will be generated. Returns: update_ops: A", "from __future__ import division from __future__ import print_function import tensorflow", "\"\"\"Adds subfix to name.\"\"\" return \"/\".join((name, suffix)) def get_attribute_dict(class_instance): \"\"\"Gets", "Version 2.0 (the \"License\"); # you may not use this", "when updating the server from many clients. var_in_shape = [None]", "update_ops, dict_update_placeholders def print_vars_on_clients(clients, sess): for c in clients.values(): print(\"client", "get_update_placeholder_name(v) var_in = tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name) var_in_mean = tf.reduce_mean(var_in, 0)", "get_train_name_scope(var_scope): return \"/\".join((var_scope, TRAIN_NAME)) def get_validation_name_scope(var_scope): return \"/\".join((var_scope, VALIDATION_NAME)) def", "get_validation_name_scope(var_scope): return \"/\".join((var_scope, VALIDATION_NAME)) def get_test_name_scope(var_scope): return \"/\".join((var_scope, TEST_NAME)) def", "% c.id) print(sess.run(c.read_ops_all_vars)) def add_prefix(prefix, name): \"\"\"Adds prefix to name.\"\"\"", "functions for manipulating variables in Federated personalization.\"\"\" from __future__ import", "return \"/\".join((var_scope, VALIDATION_NAME)) def get_test_name_scope(var_scope): return \"/\".join((var_scope, TEST_NAME)) def get_model_name_scope(var_scope):", "__future__ import absolute_import from __future__ import division 
from __future__ import", "def get_var_value_ops(var_dict): return {k: v.value() for k, v in var_dict.items()}", "implied. # See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "var_in_name = get_update_placeholder_name(v) var_in = tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name) var_in_mean =", "= \"Train\" VALIDATION_NAME = \"Validation\" TEST_NAME = \"Test\" LOSS_NAME =", "by applicable law or agreed to in writing, software #", "= {} for v in vars_: var_base_name = get_base_name(v) var_dict[var_base_name]", "var_update_name = \"update_%s_%s\" % (var_scope, var_base_name) return var_update_name def get_update_placeholder_name(var):", "to name.\"\"\" return \"/\".join((prefix, name)) def add_suffix(suffix, name): \"\"\"Adds subfix", "personalization.\"\"\" from __future__ import absolute_import from __future__ import division from", "return {k: v.value() for k, v in var_dict.items()} def get_base_name(var):", "instance.\"\"\" # first start by grabbing the Class items attribute_dict", "get_base_name(v) var_dict[var_base_name] = v return var_dict def get_var_value_ops(var_dict): return {k:", "ops will be generated. Returns: update_ops: A list of update", "get_update_placeholder_name(var): var_base_name = get_base_name(var) placeholder_name = \"placeholder_%s\" % var_base_name return", "VALIDATION_NAME = \"Validation\" TEST_NAME = \"Test\" LOSS_NAME = \"loss\" LOSS_SUMMARY_NAME", "in the new values. Then it takes the mean of", "vars_: # For every var in the scope, add a", "\"\"\"Generates update ops and placeholders. 
For each var, it generates", "= \"update_%s_%s\" % (var_scope, var_base_name) return var_update_name def get_update_placeholder_name(var): var_base_name", "from __future__ import print_function import tensorflow as tf TRAIN_NAME =", "return \"/\".join((var_scope, TRAIN_NAME)) def get_validation_name_scope(var_scope): return \"/\".join((var_scope, VALIDATION_NAME)) def get_test_name_scope(var_scope):", "= v.assign(var_in_mean) update_ops.append(update_op) dict_update_placeholders[get_base_name(v)] = var_in return update_ops, dict_update_placeholders def", "name.\"\"\" return \"/\".join((prefix, name)) def add_suffix(suffix, name): \"\"\"Adds subfix to", "placeholder to feed in the new values. Then it takes", "\"__\") # then update the class items with the instance", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "\"\"\"Utility functions for manipulating variables in Federated personalization.\"\"\" from __future__", "dict of attributeds of a class instance.\"\"\" # first start", "the specific language governing permissions and # limitations under the", "suffix)) def get_attribute_dict(class_instance): \"\"\"Gets a dict of attributeds of a", "\"Model\")) def get_update_name_scope(var_scope): return \"/\".join((var_scope, \"Update\")) def get_var_dict(vars_): \"\"\"Gets a", "to hold multiple values, this happens # when updating the", "happens # when updating the server from many clients. var_in_shape", "Federated personalization.\"\"\" from __future__ import absolute_import from __future__ import division", "v.shape.as_list() var_in_name = get_update_placeholder_name(v) var_in = tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name) var_in_mean", "applicable law or agreed to in writing, software # distributed", "# Copyright 2018 Google Inc. 
# # Licensed under the", "var_in_shape = [None] + v.shape.as_list() var_in_name = get_update_placeholder_name(v) var_in =", "if x[:2] != \"__\") # then update the class items", "attribute_dict = dict((x, y) for x, y in class_instance.__class__.__dict__.items() if", "def get_model_name_scope(var_scope): return \"/\".join((var_scope, \"Model\")) def get_update_name_scope(var_scope): return \"/\".join((var_scope, \"Update\"))", "in writing, software # distributed under the License is distributed", "= \"loss\" LOSS_SUMMARY_NAME = \"perplexity\" # Vars type. VARS_TYPE_ALL =", "its update-placeholder. \"\"\" update_ops = [] dict_update_placeholders = {} for", "start by grabbing the Class items attribute_dict = dict((x, y)", "this happens # when updating the server from many clients.", "2018 Google Inc. # # Licensed under the Apache License,", "\"placeholder_%s\" % var_base_name return placeholder_name def generate_update_ops(vars_): \"\"\"Generates update ops", "# For every var in the scope, add a placeholder", "var_dict[var_base_name] = v return var_dict def get_var_value_ops(var_dict): return {k: v.value()", "x[:2] != \"__\") # then update the class items with", "update ops. dict_update_placeholders: A dict of var base name to", "takes the mean of the inputs along dimension 0. Args:", "server from many clients. var_in_shape = [None] + v.shape.as_list() var_in_name", "in vars_: # For every var in the scope, add", "in clients.values(): print(\"client %d:\" % c.id) print(sess.run(c.read_ops_all_vars)) def add_prefix(prefix, name):", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "and placeholders. For each var, it generates a placeholder to", "get_var_value_ops(var_dict): return {k: v.value() for k, v in var_dict.items()} def", "be generated. Returns: update_ops: A list of update ops. 
dict_update_placeholders:", "return \"/\".join((var_scope, \"Model\")) def get_update_name_scope(var_scope): return \"/\".join((var_scope, \"Update\")) def get_var_dict(vars_):", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "\"\"\"Adds prefix to name.\"\"\" return \"/\".join((prefix, name)) def add_suffix(suffix, name):", "of the inputs along dimension 0. Args: vars_: Vars for", "= v return var_dict def get_var_value_ops(var_dict): return {k: v.value() for", "get_base_name(var) placeholder_name = \"placeholder_%s\" % var_base_name return placeholder_name def generate_update_ops(vars_):", "for v in vars_: # For every var in the", "var base_name (e.g. 'w') to the variable.\"\"\" var_dict = {}", "tf.reduce_mean(var_in, 0) update_op = v.assign(var_in_mean) update_ops.append(update_op) dict_update_placeholders[get_base_name(v)] = var_in return", "var_in return update_ops, dict_update_placeholders def print_vars_on_clients(clients, sess): for c in", "the License for the specific language governing permissions and #", "placeholder may need to hold multiple values, this happens #", "# then update the class items with the instance items", "Apache License, Version 2.0 (the \"License\"); # you may not", "scope, add a placeholder to feed in the new values.", "VALIDATION_NAME)) def get_test_name_scope(var_scope): return \"/\".join((var_scope, TEST_NAME)) def get_model_name_scope(var_scope): return \"/\".join((var_scope,", "either express or implied. # See the License for the", "update_ops: A list of update ops. dict_update_placeholders: A dict of", "\"Train\" VALIDATION_NAME = \"Validation\" TEST_NAME = \"Test\" LOSS_NAME = \"loss\"", "# when updating the server from many clients. 
var_in_shape =", "\"\"\"Gets a dict of attributeds of a class instance.\"\"\" #", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "a dict of attributeds of a class instance.\"\"\" # first", "\"/\".join((name, suffix)) def get_attribute_dict(class_instance): \"\"\"Gets a dict of attributeds of", "A list of update ops. dict_update_placeholders: A dict of var", "of update ops. dict_update_placeholders: A dict of var base name", "var_base_name) return var_update_name def get_update_placeholder_name(var): var_base_name = get_base_name(var) placeholder_name =", "generates a placeholder to feed in the new values. Then", "base name to its update-placeholder. \"\"\" update_ops = [] dict_update_placeholders", "LOSS_NAME = \"loss\" LOSS_SUMMARY_NAME = \"perplexity\" # Vars type. VARS_TYPE_ALL", "def add_prefix(prefix, name): \"\"\"Adds prefix to name.\"\"\" return \"/\".join((prefix, name))", "prefix to name.\"\"\" return \"/\".join((prefix, name)) def add_suffix(suffix, name): \"\"\"Adds", "base_name (e.g. 'w') to the variable.\"\"\" var_dict = {} for", "For each var, it generates a placeholder to feed in", "dict of var base_name (e.g. 'w') to the variable.\"\"\" var_dict", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "\"Update\")) def get_var_dict(vars_): \"\"\"Gets a dict of var base_name (e.g.", "placeholder_name def generate_update_ops(vars_): \"\"\"Generates update ops and placeholders. For each", "= tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name) var_in_mean = tf.reduce_mean(var_in, 0) update_op =", "return \"/\".join((var_scope, \"Update\")) def get_var_dict(vars_): \"\"\"Gets a dict of var", "the server from many clients. var_in_shape = [None] + v.shape.as_list()", "# limitations under the License. \"\"\"Utility functions for manipulating variables", "items attribute_dict = dict((x, y) for x, y in class_instance.__class__.__dict__.items()", "and # limitations under the License. 
\"\"\"Utility functions for manipulating", "absolute_import from __future__ import division from __future__ import print_function import", "{} for v in vars_: var_base_name = get_base_name(v) var_dict[var_base_name] =", "name to its update-placeholder. \"\"\" update_ops = [] dict_update_placeholders =", "ops and placeholders. For each var, it generates a placeholder", "% (var_scope, var_base_name) return var_update_name def get_update_placeholder_name(var): var_base_name = get_base_name(var)", "\"License\"); # you may not use this file except in", "get_update_name_scope(var_scope): return \"/\".join((var_scope, \"Update\")) def get_var_dict(vars_): \"\"\"Gets a dict of", "\"all\" VARS_TYPE_SHARED = \"shared\" VARS_TYPE_PERSONAL = \"personal\" def get_train_name_scope(var_scope): return", "feed in the new values. Then it takes the mean", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# distributed under the License is distributed on an \"AS", "dict of var base name to its update-placeholder. \"\"\" update_ops", "!= \"__\") # then update the class items with the", "# Unless required by applicable law or agreed to in", "VARS_TYPE_SHARED = \"shared\" VARS_TYPE_PERSONAL = \"personal\" def get_train_name_scope(var_scope): return \"/\".join((var_scope,", "TEST_NAME)) def get_model_name_scope(var_scope): return \"/\".join((var_scope, \"Model\")) def get_update_name_scope(var_scope): return \"/\".join((var_scope,", "many clients. 
var_in_shape = [None] + v.shape.as_list() var_in_name = get_update_placeholder_name(v)", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "division from __future__ import print_function import tensorflow as tf TRAIN_NAME", "v in vars_: # For every var in the scope,", "return var_update_name def get_update_placeholder_name(var): var_base_name = get_base_name(var) placeholder_name = \"placeholder_%s\"", "You may obtain a copy of the License at #", "import division from __future__ import print_function import tensorflow as tf", "\"\"\"Gets a dict of var base_name (e.g. 'w') to the", "= get_base_name(v) var_dict[var_base_name] = v return var_dict def get_var_value_ops(var_dict): return", "return \"/\".join((var_scope, TEST_NAME)) def get_model_name_scope(var_scope): return \"/\".join((var_scope, \"Model\")) def get_update_name_scope(var_scope):", "\"Test\" LOSS_NAME = \"loss\" LOSS_SUMMARY_NAME = \"perplexity\" # Vars type.", "sess): for c in clients.values(): print(\"client %d:\" % c.id) print(sess.run(c.read_ops_all_vars))", "will be generated. Returns: update_ops: A list of update ops.", "update the class items with the instance items attribute_dict.update(class_instance.__dict__) return", "the Apache License, Version 2.0 (the \"License\"); # you may", "of var base name to its update-placeholder. \"\"\" update_ops =", "License. \"\"\"Utility functions for manipulating variables in Federated personalization.\"\"\" from", "add_suffix(suffix, name): \"\"\"Adds subfix to name.\"\"\" return \"/\".join((name, suffix)) def", "first start by grabbing the Class items attribute_dict = dict((x," ]
[ "'\\n') # ---------------------------------------------- # collection.remove Unit Testing: Error Conditions #", "crud = collection.remove() crud = collection.remove(' ') crud = collection.remove(5)", "print('Records Left:', len(docs), '\\n') //! [CollectionRemove: remove under condition] #@", "> :years').execute() crud = collection.remove('name = :data and age >", "with binding print('Deleted donna:', result.affected_items_count, '\\n') result=crud.bind('data', 'alma').execute() print('Deleted alma:',", "#@ CollectionRemove: remove under condition //! [CollectionRemove: remove under condition]", "#@ CollectionRemove: full remove //! [CollectionRemove: full remove] result =", "collection.remove('some_condition').sort([]) crud = collection.remove('some_condition').sort(['name', 5]) crud = collection.remove('some_condition').sort('name', 5) #@#", "5).execute() # --------------------------------------- # collection.remove Unit Testing: Execution # ---------------------------------------", "'jack', \"age\": 17, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\", \"name\":", "collection.remove('age = 15').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all()", ":years').execute() crud = collection.remove('name = :data and age > :years').bind('years',", "on execute crud = collection.remove('name = :data and age >", "\"name\": 'angel', \"age\": 14, \"gender\": 'male'}).execute() # ------------------------------------------------ # collection.remove", "> :years').bind('another', 5) #@# CollectionRemove: Error conditions on execute crud", ":data and age > :years').execute() crud = collection.remove('name = :data", ":years').bind() crud = collection.remove('name = :data and age > :years').bind(5,", "\"name\": 'jack', \"age\": 17, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\",", "\"name\": 'donna', \"age\": 
16, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\",", "\"2') #@# CollectionRemove: Error conditions sort crud = collection.remove('some_condition').sort() crud", "#@ CollectionRemove: valid operations after sort crud = crud.sort(['name']) validate_crud_functions(crud,", "remove with binding //! [CollectionRemove: remove with binding] result =", "crud = collection.remove('name = :data and age > :years').execute() crud", "> :years').bind(5, 5) crud = collection.remove('name = :data and age", "operations after remove crud = collection.remove('some_condition') validate_crud_functions(crud, ['sort', 'limit', 'bind',", "\"3C514FF38144B714E7119BCF48B4CA03\", \"name\": 'brian', \"age\": 14, \"gender\": 'male'}).execute() result = collection.add({\"_id\":", "\"3C514FF38144B714E7119BCF48B4CA06\", \"name\": 'donna', \"age\": 16, \"gender\": 'female'}).execute() result = collection.add({\"_id\":", "= collection.remove('some_condition').sort([]) crud = collection.remove('some_condition').sort(['name', 5]) crud = collection.remove('some_condition').sort('name', 5)", "collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\", \"name\": 'jack', \"age\": 17, \"gender\": 'male'}).execute() result =", "result = collection.remove('1').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all()", ":data and age > :years').bind('another', 5) #@# CollectionRemove: Error conditions", "age > :years').execute() crud = collection.remove('name = :data and age", ":data').bind('data', 'donna') validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid operations after", "full remove //! 
[CollectionRemove: full remove] result = collection.remove('1').execute() print('Affected", "'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\", \"name\": 'brian', \"age\": 14, \"gender\":", "= collection.remove('name = :data and age > :years').bind() crud =", "and inserts data into it collection = schema.create_collection('collection1') result =", "binding print('Deleted donna:', result.affected_items_count, '\\n') result=crud.bind('data', 'alma').execute() print('Deleted alma:', result.affected_items_count,", "result=crud.bind('data', 'alma').execute() print('Deleted alma:', result.affected_items_count, '\\n') # ---------------------------------------------- # collection.remove", "collection.remove('name = :data and age > :years').bind() crud = collection.remove('name", "crud = collection.remove(5) crud = collection.remove('test = \"2') #@# CollectionRemove:", "[CollectionRemove: remove with binding] result = collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute()", "CollectionRemove: Error conditions sort crud = collection.remove('some_condition').sort() crud = collection.remove('some_condition').sort(5)", "'adam', \"age\": 15, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\", \"name\":", "\"name\": 'alma', \"age\": 13, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\",", "it collection = schema.create_collection('collection1') result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\", \"name\": 'jack',", "with binding] result = collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute() print('Affected Rows:',", "binding] result = collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute() print('Affected Rows:', result.affected_items_count,", "data into it collection = schema.create_collection('collection1') result = 
collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\",", "operations after execute result = crud.execute() validate_crud_functions(crud, ['limit', 'bind', 'execute'])", "--------------------------------------- #@ CollectionRemove: remove under condition //! [CollectionRemove: remove under", "crud = collection.remove('test = \"2') #@# CollectionRemove: Error conditions sort", "from mysqlsh import mysqlx mySession = mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession, 'js_shell_test') schema", ":years').bind('another', 5) #@# CollectionRemove: Error conditions on execute crud =", "import mysqlx mySession = mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession, 'js_shell_test') schema = mySession.create_schema('js_shell_test')", "'carol', \"age\": 14, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\", \"name\":", "\"age\": 17, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\", \"name\": 'adam',", "after bind crud = collection.remove('name = :data').bind('data', 'donna') validate_crud_functions(crud, ['bind',", "Execution # --------------------------------------- #@ CollectionRemove: remove under condition //! 
[CollectionRemove:", "5) #@# CollectionRemove: Error conditions on execute crud = collection.remove('name", "remove with binding] docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n')", "collection.remove('name = :data and age > :years').bind(5, 5) crud =", "age > :years').bind() crud = collection.remove('name = :data and age", "#@# CollectionRemove: Error conditions sort crud = collection.remove('some_condition').sort() crud =", ":years').bind('years', 5).execute() # --------------------------------------- # collection.remove Unit Testing: Execution #", "is defined as <user>:<pwd>@<host>:<plugin_port> from __future__ import print_function from mysqlsh", "collection.remove('name = :data').bind('data', 'donna') validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid", "available # Assumes __uripwd is defined as <user>:<pwd>@<host>:<plugin_port> from __future__", "defined as <user>:<pwd>@<host>:<plugin_port> from __future__ import print_function from mysqlsh import", "# ------------------------------------------------ #@ CollectionRemove: valid operations after remove crud =", "crud.limit(1) validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid operations after bind", "= collection.remove('some_condition').limit('') #@# CollectionRemove: Error conditions on bind crud =", "//! 
[CollectionRemove: remove with binding] docs = collection.find().execute().fetch_all() print('Records Left:',", "CollectionRemove: Error conditions on limit crud = collection.remove('some_condition').limit() crud =", "'donna', \"age\": 16, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\", \"name\":", "= collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') #@ CollectionRemove: full remove", "conditions on remove crud = collection.remove() crud = collection.remove(' ')", "#@# CollectionRemove: Error conditions on bind crud = collection.remove('name =", "conditions on bind crud = collection.remove('name = :data and age", "result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\", \"name\": 'angel', \"age\": 14, \"gender\": 'male'}).execute()", "collection.remove Unit Testing: Execution # --------------------------------------- #@ CollectionRemove: remove under", "as <user>:<pwd>@<host>:<plugin_port> from __future__ import print_function from mysqlsh import mysqlx", "collection.remove('test = \"2') #@# CollectionRemove: Error conditions sort crud =", "age > :years').bind(5, 5) crud = collection.remove('name = :data and", "CollectionRemove: valid operations after remove crud = collection.remove('some_condition') validate_crud_functions(crud, ['sort',", "\"age\": 16, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\", \"name\": 'angel',", "sort crud = collection.remove('some_condition').sort() crud = collection.remove('some_condition').sort(5) crud = collection.remove('some_condition').sort([])", "= collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\", \"name\": 'brian', \"age\": 14, \"gender\": 'male'}).execute() result", "print('Affected Rows:', result.affected_items_count, '\\n') //! 
[CollectionRemove: remove with binding] docs", "crud = collection.remove('some_condition').sort('name', 5) #@# CollectionRemove: Error conditions on limit", "['sort', 'limit', 'bind', 'execute']) #@ CollectionRemove: valid operations after sort", "\"gender\": 'male'}).execute() # ------------------------------------------------ # collection.remove Unit Testing: Dynamic Behavior", "Assumes __uripwd is defined as <user>:<pwd>@<host>:<plugin_port> from __future__ import print_function", "# collection.remove Unit Testing: Dynamic Behavior # ------------------------------------------------ #@ CollectionRemove:", "collection.remove('1').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all() print('Records Left:',", "= collection.remove('1').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all() print('Records", "print('Records Left:', len(docs), '\\n') //! [CollectionRemove: full remove] # Cleanup", "bind crud = collection.remove('name = :data and age > :years').bind()", "Behavior # ------------------------------------------------ #@ CollectionRemove: valid operations after remove crud", "= 15').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all() print('Records", "collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute() print('Affected Rows:', result.affected_items_count, '\\n') //! 
[CollectionRemove:", "collection.remove('some_condition') validate_crud_functions(crud, ['sort', 'limit', 'bind', 'execute']) #@ CollectionRemove: valid operations", "Dynamic Behavior # ------------------------------------------------ #@ CollectionRemove: valid operations after remove", "# --------------------------------------- # collection.remove Unit Testing: Execution # --------------------------------------- #@", "'js_shell_test') schema = mySession.create_schema('js_shell_test') # Creates a test collection and", "mysqlx mySession = mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession, 'js_shell_test') schema = mySession.create_schema('js_shell_test') #", "'execute']) #@ CollectionRemove: valid operations after sort crud = crud.sort(['name'])", "execute crud = collection.remove('name = :data and age > :years').execute()", "docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') #@ CollectionRemove: full", "= collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\", \"name\": 'alma', \"age\": 13, \"gender\": 'female'}).execute() result", "#@ CollectionRemove: valid operations after execute result = crud.execute() validate_crud_functions(crud,", "operations after sort crud = crud.sort(['name']) validate_crud_functions(crud, ['limit', 'bind', 'execute'])", "\"age\": 14, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\", \"name\": 'donna',", "#@# CollectionRemove: Error conditions on limit crud = collection.remove('some_condition').limit() crud", "crud = crud.limit(1) validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid operations", "schema = mySession.create_schema('js_shell_test') # Creates a test collection and inserts", "# Creates a test collection and inserts data into it", "Left:', len(docs), '\\n') //! 
[CollectionRemove: remove under condition] #@ CollectionRemove:", "Assumptions: validate_crud_functions available # Assumes __uripwd is defined as <user>:<pwd>@<host>:<plugin_port>", "#@# CollectionRemove: Error conditions on remove crud = collection.remove() crud", "validate_crud_functions(crud, ['sort', 'limit', 'bind', 'execute']) #@ CollectionRemove: valid operations after", "full remove] result = collection.remove('1').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs", "len(docs), '\\n') #@ CollectionRemove: full remove //! [CollectionRemove: full remove]", "remove with binding] result = collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute() print('Affected", "#@ CollectionRemove: valid operations after remove crud = collection.remove('some_condition') validate_crud_functions(crud,", ":data and age > :years').bind('years', 5).execute() # --------------------------------------- # collection.remove", "= crud.sort(['name']) validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ CollectionRemove: valid operations", "validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ Reusing CRUD with binding print('Deleted", "collection.remove('some_condition').sort() crud = collection.remove('some_condition').sort(5) crud = collection.remove('some_condition').sort([]) crud = collection.remove('some_condition').sort(['name',", "16, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\", \"name\": 'angel', \"age\":", "---------------------------------------------- # collection.remove Unit Testing: Error Conditions # ---------------------------------------------- #@#", "[CollectionRemove: remove with binding] docs = collection.find().execute().fetch_all() print('Records Left:', len(docs),", "['limit', 'bind', 'execute']) #@ CollectionRemove: valid operations after limit crud", "print('Deleted donna:', result.affected_items_count, '\\n') result=crud.bind('data', 
'alma').execute() print('Deleted alma:', result.affected_items_count, '\\n')", "'\\n') //! [CollectionRemove: remove under condition] #@ CollectionRemove: remove with", "Error conditions sort crud = collection.remove('some_condition').sort() crud = collection.remove('some_condition').sort(5) crud", "CollectionRemove: Error conditions on remove crud = collection.remove() crud =", "crud = collection.remove('name = :data and age > :years').bind(5, 5)", "mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession, 'js_shell_test') schema = mySession.create_schema('js_shell_test') # Creates a test", "CollectionRemove: valid operations after limit crud = crud.limit(1) validate_crud_functions(crud, ['bind',", "\"name\": 'carol', \"age\": 14, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\",", "'donna') validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid operations after execute", "result.affected_items_count, '\\n') result=crud.bind('data', 'alma').execute() print('Deleted alma:', result.affected_items_count, '\\n') # ----------------------------------------------", "collection.remove(5) crud = collection.remove('test = \"2') #@# CollectionRemove: Error conditions", "'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\", \"name\": 'alma', \"age\": 13, \"gender\":", "\"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\", \"name\": 'carol', \"age\": 14,", "= collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\", \"name\": 'adam', \"age\": 15, \"gender\": 'male'}).execute() result", "<user>:<pwd>@<host>:<plugin_port> from __future__ import print_function from mysqlsh import mysqlx mySession", "'execute']) #@ CollectionRemove: valid operations after limit crud = crud.limit(1)", "#@ Reusing CRUD with binding print('Deleted donna:', result.affected_items_count, '\\n') result=crud.bind('data',", 
"sort crud = crud.sort(['name']) validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ CollectionRemove:", "and age > :years').bind() crud = collection.remove('name = :data and", "13, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\", \"name\": 'carol', \"age\":", "crud = collection.remove('some_condition').limit() crud = collection.remove('some_condition').limit('') #@# CollectionRemove: Error conditions", "= collection.remove('some_condition') validate_crud_functions(crud, ['sort', 'limit', 'bind', 'execute']) #@ CollectionRemove: valid", "#@ CollectionRemove: valid operations after bind crud = collection.remove('name =", "bind crud = collection.remove('name = :data').bind('data', 'donna') validate_crud_functions(crud, ['bind', 'execute'])", "remove crud = collection.remove() crud = collection.remove(' ') crud =", "'bind', 'execute']) #@ CollectionRemove: valid operations after limit crud =", "result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\", \"name\": 'adam', \"age\": 15, \"gender\": 'male'}).execute()", "//! 
[CollectionRemove: remove under condition] #@ CollectionRemove: remove with binding", "on remove crud = collection.remove() crud = collection.remove(' ') crud", "17, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\", \"name\": 'adam', \"age\":", "= collection.remove('test = \"2') #@# CollectionRemove: Error conditions sort crud", "'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\", \"name\": 'carol', \"age\": 14, \"gender\":", "result.affected_items_count, '\\n') # ---------------------------------------------- # collection.remove Unit Testing: Error Conditions", "__future__ import print_function from mysqlsh import mysqlx mySession = mysqlx.get_session(__uripwd)", "= crud.limit(1) validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid operations after", "\"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\", \"name\": 'alma', \"age\": 13,", "5]) crud = collection.remove('some_condition').sort('name', 5) #@# CollectionRemove: Error conditions on", "alma:', result.affected_items_count, '\\n') # ---------------------------------------------- # collection.remove Unit Testing: Error", "collection.remove('some_condition').sort(['name', 5]) crud = collection.remove('some_condition').sort('name', 5) #@# CollectionRemove: Error conditions", "# ---------------------------------------------- # collection.remove Unit Testing: Error Conditions # ----------------------------------------------", "= collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') //! 
[CollectionRemove: full remove]", "\"3C514FF38144B714E7119BCF48B4CA07\", \"name\": 'angel', \"age\": 14, \"gender\": 'male'}).execute() # ------------------------------------------------ #", "14, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\", \"name\": 'donna', \"age\":", "collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\", \"name\": 'donna', \"age\": 16, \"gender\": 'female'}).execute() result =", "= mySession.create_schema('js_shell_test') # Creates a test collection and inserts data", "remove] result = collection.remove('1').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs =", "collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\", \"name\": 'angel', \"age\": 14, \"gender\": 'male'}).execute() # ------------------------------------------------", "# collection.remove Unit Testing: Execution # --------------------------------------- #@ CollectionRemove: remove", "crud = collection.remove('some_condition').limit('') #@# CollectionRemove: Error conditions on bind crud", "= collection.remove('name = :data and age > :years').bind('years', 5).execute() #", "\"name\": 'brian', \"age\": 14, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\",", "\"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\", \"name\": 'donna', \"age\": 16,", "> :years').bind('years', 5).execute() # --------------------------------------- # collection.remove Unit Testing: Execution", "mySession = mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession, 'js_shell_test') schema = mySession.create_schema('js_shell_test') # Creates", "= mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession, 'js_shell_test') schema = mySession.create_schema('js_shell_test') # Creates a", ":data and age > :years').bind(5, 5) crud = collection.remove('name =", "import print_function from mysqlsh 
import mysqlx mySession = mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession,", "'execute']) #@ CollectionRemove: valid operations after bind crud = collection.remove('name", "= collection.remove(' ') crud = collection.remove(5) crud = collection.remove('test =", "= crud.execute() validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ Reusing CRUD with", "5) crud = collection.remove('name = :data and age > :years').bind('another',", "CollectionRemove: valid operations after execute result = crud.execute() validate_crud_functions(crud, ['limit',", "= collection.remove('some_condition').sort() crud = collection.remove('some_condition').sort(5) crud = collection.remove('some_condition').sort([]) crud =", "Error conditions on limit crud = collection.remove('some_condition').limit() crud = collection.remove('some_condition').limit('')", "remove under condition //! [CollectionRemove: remove under condition] result =", "\"age\": 13, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\", \"name\": 'carol',", "limit crud = collection.remove('some_condition').limit() crud = collection.remove('some_condition').limit('') #@# CollectionRemove: Error", "collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') #@ CollectionRemove: full remove //!", "#@# CollectionRemove: Error conditions on execute crud = collection.remove('name =", "crud = collection.remove('some_condition') validate_crud_functions(crud, ['sort', 'limit', 'bind', 'execute']) #@ CollectionRemove:", "collection.remove('name = :data and age > :years').bind('years', 5).execute() # ---------------------------------------", "crud = collection.remove('some_condition').sort([]) crud = collection.remove('some_condition').sort(['name', 5]) crud = collection.remove('some_condition').sort('name',", "Error Conditions # ---------------------------------------------- #@# CollectionRemove: Error conditions on remove", "inserts 
data into it collection = schema.create_collection('collection1') result = collection.add({\"_id\":", "into it collection = schema.create_collection('collection1') result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\", \"name\":", "'execute']) #@ CollectionRemove: valid operations after execute result = crud.execute()", "len(docs), '\\n') //! [CollectionRemove: remove under condition] #@ CollectionRemove: remove", "'brian', \"age\": 14, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\", \"name\":", "= collection.remove('age = 15').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs =", "= collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\", \"name\": 'carol', \"age\": 14, \"gender\": 'female'}).execute() result", "collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\", \"name\": 'adam', \"age\": 15, \"gender\": 'male'}).execute() result =", "validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid operations after bind crud", "= collection.remove(5) crud = collection.remove('test = \"2') #@# CollectionRemove: Error", "'male'}).execute() # ------------------------------------------------ # collection.remove Unit Testing: Dynamic Behavior #", "= :data').bind('data', 'donna') validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid operations", "\"age\": 15, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\", \"name\": 'brian',", "crud = collection.remove('some_condition').sort(['name', 5]) crud = collection.remove('some_condition').sort('name', 5) #@# CollectionRemove:", "remove crud = collection.remove('some_condition') validate_crud_functions(crud, ['sort', 'limit', 'bind', 'execute']) #@", "binding //! 
[CollectionRemove: remove with binding] result = collection.remove('gender =", "Creates a test collection and inserts data into it collection", "crud = collection.remove('name = :data and age > :years').bind('years', 5).execute()", "condition] #@ CollectionRemove: remove with binding //! [CollectionRemove: remove with", "\"3C514FF38144B714E7119BCF48B4CA05\", \"name\": 'carol', \"age\": 14, \"gender\": 'female'}).execute() result = collection.add({\"_id\":", "14, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\", \"name\": 'alma', \"age\":", "= collection.remove('some_condition').sort(['name', 5]) crud = collection.remove('some_condition').sort('name', 5) #@# CollectionRemove: Error", "mysqlsh import mysqlx mySession = mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession, 'js_shell_test') schema =", "validate_crud_functions available # Assumes __uripwd is defined as <user>:<pwd>@<host>:<plugin_port> from", "'\\n') #@ CollectionRemove: full remove //! [CollectionRemove: full remove] result", "= :data and age > :years').bind('another', 5) #@# CollectionRemove: Error", "'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\", \"name\": 'adam', \"age\": 15, \"gender\":", "CRUD with binding print('Deleted donna:', result.affected_items_count, '\\n') result=crud.bind('data', 'alma').execute() print('Deleted", "[CollectionRemove: remove under condition] result = collection.remove('age = 15').execute() print('Affected", "schema.create_collection('collection1') result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\", \"name\": 'jack', \"age\": 17, \"gender\":", "'male').execute() print('Affected Rows:', result.affected_items_count, '\\n') //! [CollectionRemove: remove with binding]", "# ------------------------------------------------ # collection.remove Unit Testing: Dynamic Behavior # ------------------------------------------------", "'\\n') //! 
[CollectionRemove: remove with binding] docs = collection.find().execute().fetch_all() print('Records", "__uripwd is defined as <user>:<pwd>@<host>:<plugin_port> from __future__ import print_function from", "donna:', result.affected_items_count, '\\n') result=crud.bind('data', 'alma').execute() print('Deleted alma:', result.affected_items_count, '\\n') #", "14, \"gender\": 'male'}).execute() # ------------------------------------------------ # collection.remove Unit Testing: Dynamic", "= collection.remove('some_condition').sort(5) crud = collection.remove('some_condition').sort([]) crud = collection.remove('some_condition').sort(['name', 5]) crud", "---------------------------------------------- #@# CollectionRemove: Error conditions on remove crud = collection.remove()", "on bind crud = collection.remove('name = :data and age >", "15, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\", \"name\": 'brian', \"age\":", "# --------------------------------------- #@ CollectionRemove: remove under condition //! [CollectionRemove: remove", "result = crud.execute() validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ Reusing CRUD", "\"3C514FF38144B714E7119BCF48B4CA02\", \"name\": 'adam', \"age\": 15, \"gender\": 'male'}).execute() result = collection.add({\"_id\":", "'bind', 'execute']) #@ CollectionRemove: valid operations after sort crud =", "len(docs), '\\n') //! 
[CollectionRemove: full remove] # Cleanup mySession.drop_schema('js_shell_test') mySession.close()", "valid operations after limit crud = crud.limit(1) validate_crud_functions(crud, ['bind', 'execute'])", "= collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\", \"name\": 'donna', \"age\": 16, \"gender\": 'female'}).execute() result", "and age > :years').bind(5, 5) crud = collection.remove('name = :data", "limit crud = crud.limit(1) validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid", "operations after bind crud = collection.remove('name = :data').bind('data', 'donna') validate_crud_functions(crud,", ":heorshe').limit(2).bind('heorshe', 'male').execute() print('Affected Rows:', result.affected_items_count, '\\n') //! [CollectionRemove: remove with", "crud = collection.remove('some_condition').sort() crud = collection.remove('some_condition').sort(5) crud = collection.remove('some_condition').sort([]) crud", "Left:', len(docs), '\\n') #@ CollectionRemove: full remove //! 
[CollectionRemove: full", "= collection.remove('name = :data and age > :years').execute() crud =", "collection.remove('name = :data and age > :years').execute() crud = collection.remove('name", "') crud = collection.remove(5) crud = collection.remove('test = \"2') #@#", "'bind', 'execute']) #@ Reusing CRUD with binding print('Deleted donna:', result.affected_items_count,", "'angel', \"age\": 14, \"gender\": 'male'}).execute() # ------------------------------------------------ # collection.remove Unit", "# Assumptions: validate_crud_functions available # Assumes __uripwd is defined as", "condition] result = collection.remove('age = 15').execute() print('Affected Rows:', result.affected_items_count, '\\n')", "'execute']) #@ Reusing CRUD with binding print('Deleted donna:', result.affected_items_count, '\\n')", "under condition] result = collection.remove('age = 15').execute() print('Affected Rows:', result.affected_items_count,", "result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\", \"name\": 'jack', \"age\": 17, \"gender\": 'male'}).execute()", "collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\", \"name\": 'brian', \"age\": 14, \"gender\": 'male'}).execute() result =", "= :data and age > :years').bind(5, 5) crud = collection.remove('name", "result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\", \"name\": 'carol', \"age\": 14, \"gender\": 'female'}).execute()", "after execute result = crud.execute() validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@", "= :data and age > :years').bind('years', 5).execute() # --------------------------------------- #", "collection.remove('name = :data and age > :years').bind('another', 5) #@# CollectionRemove:", "collection and inserts data into it collection = schema.create_collection('collection1') result", "[CollectionRemove: full remove] result = collection.remove('1').execute() print('Affected Rows:', result.affected_items_count, '\\n')", "Unit Testing: Error Conditions # 
---------------------------------------------- #@# CollectionRemove: Error conditions", "Testing: Dynamic Behavior # ------------------------------------------------ #@ CollectionRemove: valid operations after", "\"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\", \"name\": 'angel', \"age\": 14,", "# collection.remove Unit Testing: Error Conditions # ---------------------------------------------- #@# CollectionRemove:", "= :data and age > :years').bind() crud = collection.remove('name =", "with binding //! [CollectionRemove: remove with binding] result = collection.remove('gender", "Rows:', result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n')", "'\\n') result=crud.bind('data', 'alma').execute() print('Deleted alma:', result.affected_items_count, '\\n') # ---------------------------------------------- #", "valid operations after remove crud = collection.remove('some_condition') validate_crud_functions(crud, ['sort', 'limit',", "collection.remove('some_condition').limit() crud = collection.remove('some_condition').limit('') #@# CollectionRemove: Error conditions on bind", "collection.remove(' ') crud = collection.remove(5) crud = collection.remove('test = \"2')", "[CollectionRemove: remove under condition] #@ CollectionRemove: remove with binding //!", "------------------------------------------------ #@ CollectionRemove: valid operations after remove crud = collection.remove('some_condition')", "CollectionRemove: full remove //! 
[CollectionRemove: full remove] result = collection.remove('1').execute()", "CollectionRemove: valid operations after bind crud = collection.remove('name = :data').bind('data',", "'limit', 'bind', 'execute']) #@ CollectionRemove: valid operations after sort crud", "valid operations after sort crud = crud.sort(['name']) validate_crud_functions(crud, ['limit', 'bind',", "crud = crud.sort(['name']) validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ CollectionRemove: valid", "Unit Testing: Dynamic Behavior # ------------------------------------------------ #@ CollectionRemove: valid operations", "['limit', 'bind', 'execute']) #@ Reusing CRUD with binding print('Deleted donna:',", "print('Deleted alma:', result.affected_items_count, '\\n') # ---------------------------------------------- # collection.remove Unit Testing:", "crud = collection.remove(' ') crud = collection.remove(5) crud = collection.remove('test", "collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') //! [CollectionRemove: remove under condition]", "['bind', 'execute']) #@ CollectionRemove: valid operations after execute result =", "= collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\", \"name\": 'angel', \"age\": 14, \"gender\": 'male'}).execute() #", "result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\", \"name\": 'donna', \"age\": 16, \"gender\": 'female'}).execute()", "mySession.create_schema('js_shell_test') # Creates a test collection and inserts data into", "//! 
[CollectionRemove: remove under condition] result = collection.remove('age = 15').execute()", "CollectionRemove: Error conditions on bind crud = collection.remove('name = :data", "collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\", \"name\": 'alma', \"age\": 13, \"gender\": 'female'}).execute() result =", "conditions sort crud = collection.remove('some_condition').sort() crud = collection.remove('some_condition').sort(5) crud =", "print_function from mysqlsh import mysqlx mySession = mysqlx.get_session(__uripwd) ensure_schema_does_not_exist(mySession, 'js_shell_test')", "15').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all() print('Records Left:',", "collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') //! [CollectionRemove: full remove] #", "Testing: Execution # --------------------------------------- #@ CollectionRemove: remove under condition //!", "collection = schema.create_collection('collection1') result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\", \"name\": 'jack', \"age\":", "a test collection and inserts data into it collection =", "result = collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute() print('Affected Rows:', result.affected_items_count, '\\n')", "\"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA02\", \"name\": 'adam', \"age\": 15,", "and age > :years').execute() crud = collection.remove('name = :data and", "operations after limit crud = crud.limit(1) validate_crud_functions(crud, ['bind', 'execute']) #@", "\"3C514FF38144B714E7119BCF48B4CA01\", \"name\": 'jack', \"age\": 17, \"gender\": 'male'}).execute() result = collection.add({\"_id\":", "Testing: Error Conditions # ---------------------------------------------- #@# CollectionRemove: Error conditions on", "CollectionRemove: valid operations after sort crud = crud.sort(['name']) 
validate_crud_functions(crud, ['limit',", "result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\", \"name\": 'brian', \"age\": 14, \"gender\": 'male'}).execute()", "collection.remove() crud = collection.remove(' ') crud = collection.remove(5) crud =", "validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove: valid operations after execute result", "age > :years').bind('years', 5).execute() # --------------------------------------- # collection.remove Unit Testing:", "crud = collection.remove('name = :data').bind('data', 'donna') validate_crud_functions(crud, ['bind', 'execute']) #@", "'alma').execute() print('Deleted alma:', result.affected_items_count, '\\n') # ---------------------------------------------- # collection.remove Unit", "= schema.create_collection('collection1') result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\", \"name\": 'jack', \"age\": 17,", "execute result = crud.execute() validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ Reusing", "Unit Testing: Execution # --------------------------------------- #@ CollectionRemove: remove under condition", "\"age\": 14, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\", \"name\": 'alma',", "Left:', len(docs), '\\n') //! [CollectionRemove: full remove] # Cleanup mySession.drop_schema('js_shell_test')", "= collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute() print('Affected Rows:', result.affected_items_count, '\\n') //!", "Error conditions on remove crud = collection.remove() crud = collection.remove('", "under condition] #@ CollectionRemove: remove with binding //! 
[CollectionRemove: remove", "crud = collection.remove('some_condition').sort(5) crud = collection.remove('some_condition').sort([]) crud = collection.remove('some_condition').sort(['name', 5])", "# Assumes __uripwd is defined as <user>:<pwd>@<host>:<plugin_port> from __future__ import", "CollectionRemove: Error conditions on execute crud = collection.remove('name = :data", "result = collection.remove('age = 15').execute() print('Affected Rows:', result.affected_items_count, '\\n') docs", "//! [CollectionRemove: remove with binding] result = collection.remove('gender = :heorshe').limit(2).bind('heorshe',", "conditions on execute crud = collection.remove('name = :data and age", "remove under condition] #@ CollectionRemove: remove with binding //! [CollectionRemove:", "crud.execute() validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ Reusing CRUD with binding", "collection.remove('some_condition').limit('') #@# CollectionRemove: Error conditions on bind crud = collection.remove('name", "print('Affected Rows:', result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all() print('Records Left:', len(docs),", "--------------------------------------- # collection.remove Unit Testing: Execution # --------------------------------------- #@ CollectionRemove:", "'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA06\", \"name\": 'donna', \"age\": 16, \"gender\":", "# ---------------------------------------------- #@# CollectionRemove: Error conditions on remove crud =", "with binding] docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') #@", "from __future__ import print_function from mysqlsh import mysqlx mySession =", "Reusing CRUD with binding print('Deleted donna:', result.affected_items_count, '\\n') result=crud.bind('data', 'alma').execute()", "'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA07\", \"name\": 'angel', \"age\": 14, 
\"gender\":", "valid operations after bind crud = collection.remove('name = :data').bind('data', 'donna')", "collection.remove('some_condition').sort('name', 5) #@# CollectionRemove: Error conditions on limit crud =", "crud = collection.remove('name = :data and age > :years').bind('another', 5)", "= collection.remove('some_condition').sort('name', 5) #@# CollectionRemove: Error conditions on limit crud", "\"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\", \"name\": 'brian', \"age\": 14,", "binding] docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') #@ CollectionRemove:", "Error conditions on execute crud = collection.remove('name = :data and", "result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA04\", \"name\": 'alma', \"age\": 13, \"gender\": 'female'}).execute()", "crud.sort(['name']) validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ CollectionRemove: valid operations after", "> :years').bind() crud = collection.remove('name = :data and age >", "condition //! [CollectionRemove: remove under condition] result = collection.remove('age =", ":data and age > :years').bind() crud = collection.remove('name = :data", "= \"2') #@# CollectionRemove: Error conditions sort crud = collection.remove('some_condition').sort()", "= collection.remove('name = :data and age > :years').bind(5, 5) crud", "Error conditions on bind crud = collection.remove('name = :data and", "CollectionRemove: remove with binding //! [CollectionRemove: remove with binding] result", "result.affected_items_count, '\\n') //! [CollectionRemove: remove with binding] docs = collection.find().execute().fetch_all()", "remove //! 
[CollectionRemove: full remove] result = collection.remove('1').execute() print('Affected Rows:',", "and age > :years').bind('another', 5) #@# CollectionRemove: Error conditions on", "remove under condition] result = collection.remove('age = 15').execute() print('Affected Rows:',", "ensure_schema_does_not_exist(mySession, 'js_shell_test') schema = mySession.create_schema('js_shell_test') # Creates a test collection", "docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') //! [CollectionRemove: remove", "= collection.remove('some_condition').limit() crud = collection.remove('some_condition').limit('') #@# CollectionRemove: Error conditions on", "on limit crud = collection.remove('some_condition').limit() crud = collection.remove('some_condition').limit('') #@# CollectionRemove:", "= :heorshe').limit(2).bind('heorshe', 'male').execute() print('Affected Rows:', result.affected_items_count, '\\n') //! [CollectionRemove: remove", "age > :years').bind('another', 5) #@# CollectionRemove: Error conditions on execute", "and age > :years').bind('years', 5).execute() # --------------------------------------- # collection.remove Unit", "collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\", \"name\": 'carol', \"age\": 14, \"gender\": 'female'}).execute() result =", "//! 
[CollectionRemove: full remove] result = collection.remove('1').execute() print('Affected Rows:', result.affected_items_count,", "after remove crud = collection.remove('some_condition') validate_crud_functions(crud, ['sort', 'limit', 'bind', 'execute'])", "= collection.remove('name = :data and age > :years').bind('another', 5) #@#", ":years').bind(5, 5) crud = collection.remove('name = :data and age >", "= :data and age > :years').execute() crud = collection.remove('name =", "test collection and inserts data into it collection = schema.create_collection('collection1')", "------------------------------------------------ # collection.remove Unit Testing: Dynamic Behavior # ------------------------------------------------ #@", "5) #@# CollectionRemove: Error conditions on limit crud = collection.remove('some_condition').limit()", "#@ CollectionRemove: remove with binding //! [CollectionRemove: remove with binding]", "docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') //! [CollectionRemove: full", "['bind', 'execute']) #@ CollectionRemove: valid operations after bind crud =", "= collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA01\", \"name\": 'jack', \"age\": 17, \"gender\": 'male'}).execute() result", "collection.remove('some_condition').sort(5) crud = collection.remove('some_condition').sort([]) crud = collection.remove('some_condition').sort(['name', 5]) crud =", "after limit crud = crud.limit(1) validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove:", "crud = collection.remove('name = :data and age > :years').bind() crud", "under condition //! [CollectionRemove: remove under condition] result = collection.remove('age", "CollectionRemove: remove under condition //! 
[CollectionRemove: remove under condition] result", "\"age\": 14, \"gender\": 'male'}).execute() # ------------------------------------------------ # collection.remove Unit Testing:", "valid operations after execute result = crud.execute() validate_crud_functions(crud, ['limit', 'bind',", "= collection.remove() crud = collection.remove(' ') crud = collection.remove(5) crud", "= collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') //! [CollectionRemove: remove under", "result.affected_items_count, '\\n') docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') //!", "\"3C514FF38144B714E7119BCF48B4CA04\", \"name\": 'alma', \"age\": 13, \"gender\": 'female'}).execute() result = collection.add({\"_id\":", "Rows:', result.affected_items_count, '\\n') //! [CollectionRemove: remove with binding] docs =", "print('Records Left:', len(docs), '\\n') #@ CollectionRemove: full remove //! [CollectionRemove:", "collection.remove Unit Testing: Error Conditions # ---------------------------------------------- #@# CollectionRemove: Error", "after sort crud = crud.sort(['name']) validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@", "validate_crud_functions(crud, ['limit', 'bind', 'execute']) #@ CollectionRemove: valid operations after limit", "Conditions # ---------------------------------------------- #@# CollectionRemove: Error conditions on remove crud", "'\\n') docs = collection.find().execute().fetch_all() print('Records Left:', len(docs), '\\n') //! 
[CollectionRemove:", "conditions on limit crud = collection.remove('some_condition').limit() crud = collection.remove('some_condition').limit('') #@#", "= collection.remove('name = :data').bind('data', 'donna') validate_crud_functions(crud, ['bind', 'execute']) #@ CollectionRemove:", "'alma', \"age\": 13, \"gender\": 'female'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA05\", \"name\":", "\"name\": 'adam', \"age\": 15, \"gender\": 'male'}).execute() result = collection.add({\"_id\": \"3C514FF38144B714E7119BCF48B4CA03\",", "collection.remove Unit Testing: Dynamic Behavior # ------------------------------------------------ #@ CollectionRemove: valid", "#@ CollectionRemove: valid operations after limit crud = crud.limit(1) validate_crud_functions(crud," ]
[ "import turtle turtle.bgcolor('black') wn=turtle.Screen() tr=turtle.Turtle() move=1 tr.speed(\"fastest\") for i in", "tr.forward(50) tr.right(70) tr.penup() tr.pendown() tr.color('light blue') tr.forward(50) tr.color('light green') tr.pu()", "tr.pendown() tr.color('light blue') tr.forward(50) tr.color('light green') tr.pu() tr.pd() tr.color(\"light blue\")", "tr.color(\"orange\") tr.right(move) tr.forward(100) tr.penup() tr.color(\"white\") tr.pendown() tr.right(30) tr.forward(60) tr.pendown() tr.color(\"light", "tr.color('light blue') tr.circle(4) tr.pu() tr.fd(20) tr.pd() tr.circle(6) tr.pu() tr.fd(40) tr.pd()", "tr.circle(4) tr.pu() tr.fd(20) tr.pd() tr.circle(6) tr.pu() tr.fd(40) tr.pd() tr.circle(8) tr.pu()", "tr.forward(100) tr.color('brown') tr.forward(200) tr.pu() tr.pd() tr.color('light green') tr.circle(2) tr.color('light blue')", "tr.fd(80) tr.pd() tr.circle(10) tr.pu() tr.fd(120) tr.pd() tr.circle(20) tr.color('yellow') tr.circle(10) tr.pu()", "tr.pd() tr.color(\"light blue\") tr.forward(100) tr.color('brown') tr.forward(200) tr.pu() tr.pd() tr.color('light green')", "tr.pendown() tr.color(\"orange\") tr.right(move) tr.forward(100) tr.penup() tr.color(\"white\") tr.pendown() tr.right(30) tr.forward(60) tr.pendown()", "tr.circle(8) tr.pu() tr.fd(80) tr.pd() tr.circle(10) tr.pu() tr.fd(120) tr.pd() tr.circle(20) tr.color('yellow')", "turtle.bgcolor('black') wn=turtle.Screen() tr=turtle.Turtle() move=1 tr.speed(\"fastest\") for i in range (360):", "tr.pd() tr.circle(10) tr.pu() tr.fd(120) tr.pd() tr.circle(20) tr.color('yellow') tr.circle(10) tr.pu() tr.pd()", "tr.pendown() tr.color(\"light green\") tr.left(10) tr.forward(50) tr.right(70) tr.penup() tr.pendown() tr.color('light blue')", "move=1 tr.speed(\"fastest\") for i in range (360): tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50)) tr.penup()", "range (360): tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50)) tr.penup() tr.goto(-200,100) tr.pendown() 
tr.color(\"orange\") tr.right(move) tr.forward(100)", "tr.fd(40) tr.pd() tr.circle(8) tr.pu() tr.fd(80) tr.pd() tr.circle(10) tr.pu() tr.fd(120) tr.pd()", "blue') tr.forward(50) tr.color('light green') tr.pu() tr.pd() tr.color(\"light blue\") tr.forward(100) tr.color('brown')", "tr.circle(6) tr.pu() tr.fd(40) tr.pd() tr.circle(8) tr.pu() tr.fd(80) tr.pd() tr.circle(10) tr.pu()", "tr.forward(60) tr.pendown() tr.color(\"light green\") tr.left(10) tr.forward(50) tr.right(70) tr.penup() tr.pendown() tr.color('light", "tr.pd() tr.circle(6) tr.pu() tr.fd(40) tr.pd() tr.circle(8) tr.pu() tr.fd(80) tr.pd() tr.circle(10)", "tr.penup() tr.pendown() tr.color('light blue') tr.forward(50) tr.color('light green') tr.pu() tr.pd() tr.color(\"light", "blue\") tr.forward(100) tr.color('brown') tr.forward(200) tr.pu() tr.pd() tr.color('light green') tr.circle(2) tr.color('light", "tr.pu() tr.pd() tr.color('white') tr.forward(150) tr.color('red') tr.fd(50) tr.color ('blue') tr.begin_fill() tr.penup()", "tr.forward(200) tr.pu() tr.pd() tr.color('light green') tr.circle(2) tr.color('light blue') tr.circle(4) tr.pu()", "tr.forward(100) tr.penup() tr.color(\"white\") tr.pendown() tr.right(30) tr.forward(60) tr.pendown() tr.color(\"light green\") tr.left(10)", "tr.penup() tr.color(\"white\") tr.pendown() tr.right(30) tr.forward(60) tr.pendown() tr.color(\"light green\") tr.left(10) tr.forward(50)", "green\") tr.left(10) tr.forward(50) tr.right(70) tr.penup() tr.pendown() tr.color('light blue') tr.forward(50) tr.color('light", "tr.color(\"white\") tr.pendown() tr.right(30) tr.forward(60) tr.pendown() tr.color(\"light green\") tr.left(10) tr.forward(50) tr.right(70)", "tr.pd() tr.color('light green') tr.circle(2) tr.color('light blue') tr.circle(4) tr.pu() tr.fd(20) tr.pd()", "tr.right(30) tr.forward(60) tr.pendown() tr.color(\"light green\") tr.left(10) tr.forward(50) tr.right(70) tr.penup() tr.pendown()", "tr.circle(10) tr.pu() tr.pd() tr.color('white') tr.forward(150) tr.color('red') 
tr.fd(50) tr.color ('blue') tr.begin_fill()", "tr.pu() tr.pd() tr.color(\"light blue\") tr.forward(100) tr.color('brown') tr.forward(200) tr.pu() tr.pd() tr.color('light", "tr.fd(120) tr.pd() tr.circle(20) tr.color('yellow') tr.circle(10) tr.pu() tr.pd() tr.color('white') tr.forward(150) tr.color('red')", "tr.right(move) tr.forward(100) tr.penup() tr.color(\"white\") tr.pendown() tr.right(30) tr.forward(60) tr.pendown() tr.color(\"light green\")", "tr.pd() tr.circle(8) tr.pu() tr.fd(80) tr.pd() tr.circle(10) tr.pu() tr.fd(120) tr.pd() tr.circle(20)", "tr.forward(50) tr.color('light green') tr.pu() tr.pd() tr.color(\"light blue\") tr.forward(100) tr.color('brown') tr.forward(200)", "wn=turtle.Screen() tr=turtle.Turtle() move=1 tr.speed(\"fastest\") for i in range (360): tr.write(\"ADITYA\",'false','center',font=('Showcard", "tr.pd() tr.circle(20) tr.color('yellow') tr.circle(10) tr.pu() tr.pd() tr.color('white') tr.forward(150) tr.color('red') tr.fd(50)", "tr.pu() tr.fd(20) tr.pd() tr.circle(6) tr.pu() tr.fd(40) tr.pd() tr.circle(8) tr.pu() tr.fd(80)", "tr.goto(-200,100) tr.pendown() tr.color(\"orange\") tr.right(move) tr.forward(100) tr.penup() tr.color(\"white\") tr.pendown() tr.right(30) tr.forward(60)", "tr=turtle.Turtle() move=1 tr.speed(\"fastest\") for i in range (360): tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50))", "tr.penup() tr.goto(-200,100) tr.pendown() tr.color(\"orange\") tr.right(move) tr.forward(100) tr.penup() tr.color(\"white\") tr.pendown() tr.right(30)", "tr.pu() tr.pd() tr.color('light green') tr.circle(2) tr.color('light blue') tr.circle(4) tr.pu() tr.fd(20)", "gothic',50)) tr.penup() tr.goto(-200,100) tr.pendown() tr.color(\"orange\") tr.right(move) tr.forward(100) tr.penup() tr.color(\"white\") tr.pendown()", "tr.forward(150) tr.color('red') tr.fd(50) tr.color ('blue') tr.begin_fill() tr.penup() tr.home() move=move+1 tr.penup()", "green') tr.pu() tr.pd() tr.color(\"light blue\") tr.forward(100) tr.color('brown') 
tr.forward(200) tr.pu() tr.pd()", "tr.pu() tr.fd(40) tr.pd() tr.circle(8) tr.pu() tr.fd(80) tr.pd() tr.circle(10) tr.pu() tr.fd(120)", "tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50)) tr.penup() tr.goto(-200,100) tr.pendown() tr.color(\"orange\") tr.right(move) tr.forward(100) tr.penup() tr.color(\"white\")", "tr.circle(2) tr.color('light blue') tr.circle(4) tr.pu() tr.fd(20) tr.pd() tr.circle(6) tr.pu() tr.fd(40)", "tr.left(10) tr.forward(50) tr.right(70) tr.penup() tr.pendown() tr.color('light blue') tr.forward(50) tr.color('light green')", "tr.color('white') tr.forward(150) tr.color('red') tr.fd(50) tr.color ('blue') tr.begin_fill() tr.penup() tr.home() move=move+1", "tr.speed(\"fastest\") for i in range (360): tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50)) tr.penup() tr.goto(-200,100)", "i in range (360): tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50)) tr.penup() tr.goto(-200,100) tr.pendown() tr.color(\"orange\")", "tr.color(\"light green\") tr.left(10) tr.forward(50) tr.right(70) tr.penup() tr.pendown() tr.color('light blue') tr.forward(50)", "tr.pu() tr.fd(120) tr.pd() tr.circle(20) tr.color('yellow') tr.circle(10) tr.pu() tr.pd() tr.color('white') tr.forward(150)", "tr.color('brown') tr.forward(200) tr.pu() tr.pd() tr.color('light green') tr.circle(2) tr.color('light blue') tr.circle(4)", "for i in range (360): tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50)) tr.penup() tr.goto(-200,100) tr.pendown()", "tr.right(70) tr.penup() tr.pendown() tr.color('light blue') tr.forward(50) tr.color('light green') tr.pu() tr.pd()", "tr.color('light green') tr.pu() tr.pd() tr.color(\"light blue\") tr.forward(100) tr.color('brown') tr.forward(200) tr.pu()", "tr.color('red') tr.fd(50) tr.color ('blue') tr.begin_fill() tr.penup() tr.home() move=move+1 tr.penup() tr.forward(50)", "tr.color(\"light blue\") tr.forward(100) tr.color('brown') tr.forward(200) tr.pu() tr.pd() tr.color('light green') 
tr.circle(2)", "tr.pendown() tr.right(30) tr.forward(60) tr.pendown() tr.color(\"light green\") tr.left(10) tr.forward(50) tr.right(70) tr.penup()", "tr.color('light green') tr.circle(2) tr.color('light blue') tr.circle(4) tr.pu() tr.fd(20) tr.pd() tr.circle(6)", "<gh_stars>0 import turtle turtle.bgcolor('black') wn=turtle.Screen() tr=turtle.Turtle() move=1 tr.speed(\"fastest\") for i", "tr.color('light blue') tr.forward(50) tr.color('light green') tr.pu() tr.pd() tr.color(\"light blue\") tr.forward(100)", "(360): tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50)) tr.penup() tr.goto(-200,100) tr.pendown() tr.color(\"orange\") tr.right(move) tr.forward(100) tr.penup()", "blue') tr.circle(4) tr.pu() tr.fd(20) tr.pd() tr.circle(6) tr.pu() tr.fd(40) tr.pd() tr.circle(8)", "tr.fd(20) tr.pd() tr.circle(6) tr.pu() tr.fd(40) tr.pd() tr.circle(8) tr.pu() tr.fd(80) tr.pd()", "tr.fd(50) tr.color ('blue') tr.begin_fill() tr.penup() tr.home() move=move+1 tr.penup() tr.forward(50) turtle.done()", "tr.pd() tr.color('white') tr.forward(150) tr.color('red') tr.fd(50) tr.color ('blue') tr.begin_fill() tr.penup() tr.home()", "green') tr.circle(2) tr.color('light blue') tr.circle(4) tr.pu() tr.fd(20) tr.pd() tr.circle(6) tr.pu()", "tr.pu() tr.fd(80) tr.pd() tr.circle(10) tr.pu() tr.fd(120) tr.pd() tr.circle(20) tr.color('yellow') tr.circle(10)", "in range (360): tr.write(\"ADITYA\",'false','center',font=('Showcard gothic',50)) tr.penup() tr.goto(-200,100) tr.pendown() tr.color(\"orange\") tr.right(move)", "tr.circle(20) tr.color('yellow') tr.circle(10) tr.pu() tr.pd() tr.color('white') tr.forward(150) tr.color('red') tr.fd(50) tr.color", "tr.circle(10) tr.pu() tr.fd(120) tr.pd() tr.circle(20) tr.color('yellow') tr.circle(10) tr.pu() tr.pd() tr.color('white')", "turtle turtle.bgcolor('black') wn=turtle.Screen() tr=turtle.Turtle() move=1 tr.speed(\"fastest\") for i in range", "tr.color('yellow') tr.circle(10) tr.pu() tr.pd() tr.color('white') tr.forward(150) 
tr.color('red') tr.fd(50) tr.color ('blue')" ]
[ ".user import User from .user import UserIndex from .auth import", "Group from .user import User from .user import UserIndex from", "组, 及相关认证数据库模型\"\"\" from .group import Group from .user import User", "\"\"\"用户, 组, 及相关认证数据库模型\"\"\" from .group import Group from .user import", "from .group import Group from .user import User from .user", "from .user import User from .user import UserIndex from .auth", "import User from .user import UserIndex from .auth import Authentication", "User from .user import UserIndex from .auth import Authentication from", "import Group from .user import User from .user import UserIndex", "import UserIndex from .auth import Authentication from .accesspoint import AccessPoint", "from .user import UserIndex from .auth import Authentication from .accesspoint", ".group import Group from .user import User from .user import", "及相关认证数据库模型\"\"\" from .group import Group from .user import User from", ".user import UserIndex from .auth import Authentication from .accesspoint import" ]
[ "bino = int(input()) cino = int(input()) if (bino+cino)%2==0: print(\"Bino\") else:", "= int(input()) cino = int(input()) if (bino+cino)%2==0: print(\"Bino\") else: print(\"Cino\")" ]
[ "x in easy] easy_solved = \"\" for el in easy:", "medium_solved = \"\" for el in medium: medium_solved += \"{},", "sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium = [x.split(\"_\")[0] for x in medium] medium_solved =", "in coding.\\n\") langs = [l for l in os.listdir(curr_dir) if", "= os.path.dirname(__file__) with open(os.path.join(curr_dir, \"README.md\"), 'w') as readme: readme.write(\"# LeetCode\\nDeliberate", "files at ./easy and ./medium folders. \"\"\" import os curr_dir", "medium = sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium = [x.split(\"_\")[0] for x in medium]", "./easy and ./medium folders. \"\"\" import os curr_dir = os.path.dirname(__file__)", "'.'] for lang in langs: readme.write(\"## {}\\n\".format(lang)) readme.write(\"### Easy\\n\") easy", "'w') as readme: readme.write(\"# LeetCode\\nDeliberate practice in coding.\\n\") langs =", "= \"\" for el in medium: medium_solved += \"{}, \".format(el)", "= [l for l in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l)) and", "os.path.dirname(__file__) with open(os.path.join(curr_dir, \"README.md\"), 'w') as readme: readme.write(\"# LeetCode\\nDeliberate practice", "+ \"\\n\") readme.write(\"### Medium\\n\") medium = sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium = [x.split(\"_\")[0]", "coding.\\n\") langs = [l for l in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir,", "with respect to files at ./easy and ./medium folders. \"\"\"", "langs = [l for l in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l))", "langs: readme.write(\"## {}\\n\".format(lang)) readme.write(\"### Easy\\n\") easy = sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy =", "at ./easy and ./medium folders. 
\"\"\" import os curr_dir =", "and l[0] != '.'] for lang in langs: readme.write(\"## {}\\n\".format(lang))", "= [x.split(\"_\")[0] for x in easy] easy_solved = \"\" for", "for lang in langs: readme.write(\"## {}\\n\".format(lang)) readme.write(\"### Easy\\n\") easy =", "`README.md` with respect to files at ./easy and ./medium folders.", "readme: readme.write(\"# LeetCode\\nDeliberate practice in coding.\\n\") langs = [l for", "easy_solved = \"\" for el in easy: easy_solved += \"{},", "Script updates `README.md` with respect to files at ./easy and", "in langs: readme.write(\"## {}\\n\".format(lang)) readme.write(\"### Easy\\n\") easy = sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy", "import os curr_dir = os.path.dirname(__file__) with open(os.path.join(curr_dir, \"README.md\"), 'w') as", "el in easy: easy_solved += \"{}, \".format(el) readme.write(easy_solved[:-2] + \"\\n\")", "readme.write(easy_solved[:-2] + \"\\n\") readme.write(\"### Medium\\n\") medium = sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium =", "respect to files at ./easy and ./medium folders. 
\"\"\" import", "easy: easy_solved += \"{}, \".format(el) readme.write(easy_solved[:-2] + \"\\n\") readme.write(\"### Medium\\n\")", "= sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy = [x.split(\"_\")[0] for x in easy] easy_solved", "in medium] medium_solved = \"\" for el in medium: medium_solved", "\".format(el) readme.write(easy_solved[:-2] + \"\\n\") readme.write(\"### Medium\\n\") medium = sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium", "x in medium] medium_solved = \"\" for el in medium:", "LeetCode\\nDeliberate practice in coding.\\n\") langs = [l for l in", "medium] medium_solved = \"\" for el in medium: medium_solved +=", "l[0] != '.'] for lang in langs: readme.write(\"## {}\\n\".format(lang)) readme.write(\"###", "for l in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l)) and l[0] !=", "for x in medium] medium_solved = \"\" for el in", "in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l)) and l[0] != '.'] for", "\"README.md\"), 'w') as readme: readme.write(\"# LeetCode\\nDeliberate practice in coding.\\n\") langs", "\"\"\" Script updates `README.md` with respect to files at ./easy", "if os.path.isdir(os.path.join(curr_dir, l)) and l[0] != '.'] for lang in", "updates `README.md` with respect to files at ./easy and ./medium", "in easy] easy_solved = \"\" for el in easy: easy_solved", "= \"\" for el in easy: easy_solved += \"{}, \".format(el)", "\"\" for el in medium: medium_solved += \"{}, \".format(el) readme.write(medium_solved[:-2]", "folders. 
\"\"\" import os curr_dir = os.path.dirname(__file__) with open(os.path.join(curr_dir, \"README.md\"),", "for x in easy] easy_solved = \"\" for el in", "{}\\n\".format(lang)) readme.write(\"### Easy\\n\") easy = sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy = [x.split(\"_\")[0] for", "for el in easy: easy_solved += \"{}, \".format(el) readme.write(easy_solved[:-2] +", "\"{}, \".format(el) readme.write(easy_solved[:-2] + \"\\n\") readme.write(\"### Medium\\n\") medium = sorted(os.listdir(f\"{curr_dir}/{lang}/medium\"))", "[l for l in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l)) and l[0]", "!= '.'] for lang in langs: readme.write(\"## {}\\n\".format(lang)) readme.write(\"### Easy\\n\")", "to files at ./easy and ./medium folders. \"\"\" import os", "el in medium: medium_solved += \"{}, \".format(el) readme.write(medium_solved[:-2] + '\\n')", "in easy: easy_solved += \"{}, \".format(el) readme.write(easy_solved[:-2] + \"\\n\") readme.write(\"###", "\"\" for el in easy: easy_solved += \"{}, \".format(el) readme.write(easy_solved[:-2]", "easy = sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy = [x.split(\"_\")[0] for x in easy]", "[x.split(\"_\")[0] for x in easy] easy_solved = \"\" for el", "l in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l)) and l[0] != '.']", "sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy = [x.split(\"_\")[0] for x in easy] easy_solved =", "open(os.path.join(curr_dir, \"README.md\"), 'w') as readme: readme.write(\"# LeetCode\\nDeliberate practice in coding.\\n\")", "easy = [x.split(\"_\")[0] for x in easy] easy_solved = \"\"", "./medium folders. 
\"\"\" import os curr_dir = os.path.dirname(__file__) with open(os.path.join(curr_dir,", "[x.split(\"_\")[0] for x in medium] medium_solved = \"\" for el", "readme.write(\"### Medium\\n\") medium = sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium = [x.split(\"_\")[0] for x", "curr_dir = os.path.dirname(__file__) with open(os.path.join(curr_dir, \"README.md\"), 'w') as readme: readme.write(\"#", "= sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium = [x.split(\"_\")[0] for x in medium] medium_solved", "easy] easy_solved = \"\" for el in easy: easy_solved +=", "readme.write(\"### Easy\\n\") easy = sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy = [x.split(\"_\")[0] for x", "l)) and l[0] != '.'] for lang in langs: readme.write(\"##", "\"\\n\") readme.write(\"### Medium\\n\") medium = sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium = [x.split(\"_\")[0] for", "practice in coding.\\n\") langs = [l for l in os.listdir(curr_dir)", "as readme: readme.write(\"# LeetCode\\nDeliberate practice in coding.\\n\") langs = [l", "= [x.split(\"_\")[0] for x in medium] medium_solved = \"\" for", "os.path.isdir(os.path.join(curr_dir, l)) and l[0] != '.'] for lang in langs:", "lang in langs: readme.write(\"## {}\\n\".format(lang)) readme.write(\"### Easy\\n\") easy = sorted(os.listdir(f\"{curr_dir}/{lang}/easy\"))", "os curr_dir = os.path.dirname(__file__) with open(os.path.join(curr_dir, \"README.md\"), 'w') as readme:", "medium = [x.split(\"_\")[0] for x in medium] medium_solved = \"\"", "Medium\\n\") medium = sorted(os.listdir(f\"{curr_dir}/{lang}/medium\")) medium = [x.split(\"_\")[0] for x in", "with open(os.path.join(curr_dir, \"README.md\"), 'w') as readme: readme.write(\"# LeetCode\\nDeliberate practice in", "readme.write(\"# LeetCode\\nDeliberate practice in coding.\\n\") langs = [l for l", "\"\"\" import os curr_dir = os.path.dirname(__file__) with open(os.path.join(curr_dir, \"README.md\"), 'w')", "Easy\\n\") easy = 
sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy = [x.split(\"_\")[0] for x in", "readme.write(\"## {}\\n\".format(lang)) readme.write(\"### Easy\\n\") easy = sorted(os.listdir(f\"{curr_dir}/{lang}/easy\")) easy = [x.split(\"_\")[0]", "easy_solved += \"{}, \".format(el) readme.write(easy_solved[:-2] + \"\\n\") readme.write(\"### Medium\\n\") medium", "os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l)) and l[0] != '.'] for lang", "for el in medium: medium_solved += \"{}, \".format(el) readme.write(medium_solved[:-2] +", "+= \"{}, \".format(el) readme.write(easy_solved[:-2] + \"\\n\") readme.write(\"### Medium\\n\") medium =", "and ./medium folders. \"\"\" import os curr_dir = os.path.dirname(__file__) with" ]
[ "table.columns = [ (\"ID_ALL_RAW_DATA\", (\"int\",)), (\"ABUNDANCE\", (\"double\",)), (\"BIOMASS\", (\"double\",)), (\"ID_SPECIES\",", "# Create site table table = Table(\"site\", delimiter=\",\", header_rows=0, contains_pk=False)", "import parse_version from retriever.lib.models import Table from retriever.lib.templates import Script", "Table(\"ID_ABUNDANCE\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_ABUNDANCE\", (\"int\",)), (\"ABUNDANCE_TYPE\",", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SPECIES\", (\"int\",)), (\"GENUS\", (\"char\",", "(\") + 8:-3] table_rows = row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for i_row in table_rows:", "']') csv_writer.writerows([v]) if csv_file: csv_file.close() # Create abundance table table", "(\"DATA_SOURCE\", (\"char\", 250)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) #", "(\"CENT_LONG\", (\"double\",)), (\"NUMBER_OF_SPECIES\", (\"char\",)), (\"NUMBER_OF_SAMPLES\", (\"char\",)), (\"NUMBER_LAT_LONG\", (\"char\",)), (\"TOTAL\", (\"char\",)),", "table_rows = row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for i_row in table_rows: v = eval('['", "Script.__init__(self, **kwargs) self.title = \"Commercial Fisheries Monthly Trade Data by", "(\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CITATION_LINE\", (\"char\",)), ] engine.table = table engine.create_table()", "(\"double\",)), (\"CENT_LONG\", (\"double\",)), (\"NUMBER_OF_SPECIES\", (\"char\",)), (\"NUMBER_OF_SAMPLES\", (\"char\",)), (\"NUMBER_LAT_LONG\", (\"char\",)), (\"TOTAL\",", "200)), (\"WEB_LINK\", (\"char\", 200)), (\"DATA_SOURCE\", (\"char\", 250)), ] engine.table =", "200)), (\"TREAT_COMMENTS\", (\"char\", 250)), (\"TREAT_DATE\", (\"char\", 100)), (\"CEN_LATITUDE\", (\"double\",)), (\"CEN_LONGITUDE\",", "+ str(i_row) + ']') csv_writer.writerows([v]) if csv_file: csv_file.close() # Create", "table_indicator = \"-- Table structure 
for table \" if line.startswith(table_indicator):", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_SITE\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"REALM\",", "def __init__(self, **kwargs): Script.__init__(self, **kwargs) self.title = \"Commercial Fisheries Monthly", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SAMPLE\", (\"int\",)), (\"ID_TREAT\", (\"int\",)),", "= Table(\"allrawdata\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_ALL_RAW_DATA\", (\"int\",)),", "= [ (\"ID_CITATION1\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CITATION_LINE\", (\"char\",)), ] engine.table", "engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) # Create countries table table = Table(\"countries\", delimiter=\",\", header_rows=0,", "= [(\"COUNT_ID\", (\"int\",)), (\"COUNTRY_NAME\", (\"char\", 200))] engine.table = table engine.create_table()", "(\"char\", 250)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) # Create", "# Create downloads table table = Table(\"downloads\", delimiter=\",\", header_rows=0, contains_pk=False)", "(\"CEN_LONGITUDE\", (\"double\",)), (\"HABITAT\", (\"char\", 100)), (\"PROTECTED_AREA\", (\"char\", 50)), (\"AREA\", (\"double\",)),", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) # Create countries table", "250)), (\"DATE_STAMP\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) #", "def download(self, engine=None, debug=False): Script.download(self, engine, debug) engine = self.engine", "engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) # Create citation1 table table = Table(\"citation1\", delimiter=\",\", header_rows=0,", "al. 
BioTIME: A database of biodiversity time series for the", "Create curation table table = Table(\"curation\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "(\"int\",)), (\"GENUS\", (\"char\", 100)), (\"SPECIES\", (\"char\", 100)), (\"GENUS_SPECIES\", (\"char\", 100))", "None csv_file = None table_name = None NULL = None", "(\"char\", 100)), (\"GENUS_SPECIES\", (\"char\", 100)) ] engine.table = table engine.create_table()", "Script.download(self, engine, debug) engine = self.engine original_sql_file = \"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"],", "Script try: from retriever.lib.defaults import VERSION try: from retriever.lib.tools import", "try: from retriever.lib.tools import open_fr, open_fw, open_csvw except ImportError: from", "in sql_data: table_indicator = \"-- Table structure for table \"", "(\"HAS_PLOT\", (\"char\", 10)), (\"DATA_POINTS\", (\"char\",)), (\"START_YEAR\", (\"char\",)), (\"END_YEAR\", (\"char\",)), (\"CENT_LAT\",", "(\"char\", )), (\"BIO_TYPE\", (\"char\",)), (\"SAMPLE_TYPE\", (\"char\",)), ] engine.table = table", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) # Create methods table table = Table(\"methods\", delimiter=\",\",", "(\"char\",)), (\"NUMBER_OF_SAMPLES\", (\"char\",)), (\"NUMBER_LAT_LONG\", (\"char\",)), (\"TOTAL\", (\"char\",)), (\"GRAIN_SIZE_TEXT\", (\"char\",)), (\"GRAIN_SQ_KM\",", "(\"char\",)), (\"GRAIN_SQ_KM\", (\"double\",)), (\"AREA_SQ_KM\", (\"double\",)), (\"AB_TYPE\", (\"char\", )), (\"BIO_TYPE\", (\"char\",)),", "\"1.0.1\" self.ref = \"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation = \"<NAME>, <NAME>, <NAME>, et", "Table(\"site\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SITE\", (\"int\",)), (\"STUDY_ID\",", "(\"char\", \"100\")), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) # Create", "60)), 
(\"LICENSE\", (\"char\", 200)), (\"WEB_LINK\", (\"char\", 200)), (\"DATA_SOURCE\", (\"char\", 250)),", "= \"{name}.csv\".format(name=table_name) csv_file = open_fw(engine.format_filename(out_file)) csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL) set_open", "[ (\"D_ID\", (\"int\",)), (\"STUDY\", (\"char\", 25)), (\"NAME\", (\"char\", 150)), (\"EMAIL\",", "assemblages through time.\" self.keywords = [\"Time series\", \"Anthropocene\", \"Global\"] self.licenses", "self.licenses = [{\"name\": \"CC BY 4.0\"}] self.encoding = \"latin1\" if", "50)), (\"AREA\", (\"double\",)), (\"BIOME_MAP\", (\"char\", 500)) ] engine.table = table", "(\"char\",)), (\"NUMBER_LAT_LONG\", (\"char\",)), (\"TOTAL\", (\"char\",)), (\"GRAIN_SIZE_TEXT\", (\"char\",)), (\"GRAIN_SQ_KM\", (\"double\",)), (\"AREA_SQ_KM\",", "(\"double\",)), (\"CEN_LONGITUDE\", (\"double\",)), (\"HABITAT\", (\"char\", 100)), (\"PROTECTED_AREA\", (\"char\", 50)), (\"AREA\",", "# Create contacts table table = Table(\"contacts\", delimiter=\",\", header_rows=0, contains_pk=False)", "Monthly Trade Data by Product, Country/Association\" self.name = \"biotimesql\" self.retriever_minimum_version", "in ecological assemblages through time.\" self.keywords = [\"Time series\", \"Anthropocene\",", "table = Table(\"contacts\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CONTACTS\",", "Create species table table = Table(\"species\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CITATION1\", (\"int\",)), (\"STUDY_ID\", (\"int\",)),", "200)), (\"PLOT\", (\"char\", 150)), (\"LATITUDE\", (\"double\",)), (\"LONGITUDE\", (\"double\",)), (\"DEPTH\", (\"double\",)),", "200)), (\"DATA_SOURCE\", (\"char\", 250)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\"))", "self.description = \"The BioTIME database has species identities and 
abundances", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) # Create allrawdata table", "(\"TITLE\", (\"char\",800)), (\"AB_BIO\", (\"char\", 2)), (\"HAS_PLOT\", (\"char\", 10)), (\"DATA_POINTS\", (\"char\",)),", "(\"char\", 100)), (\"CEN_LATITUDE\", (\"double\",)), (\"CEN_LONGITUDE\", (\"double\",)), (\"HABITAT\", (\"char\", 100)), (\"PROTECTED_AREA\",", "(\"char\", 200)), (\"TREAT_COMMENTS\", (\"char\", 250)), (\"TREAT_DATE\", (\"char\", 100)), (\"CEN_LATITUDE\", (\"double\",)),", "(\"char\", 200))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) # Create curation", "self.name = \"biotimesql\" self.retriever_minimum_version = \"2.2.0\" self.urls = { \"sql_file\":", "curation table table = Table(\"curation\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "\"100\"))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) # Create citation1 table", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) # Create downloads table table", "class main(Script): def __init__(self, **kwargs): Script.__init__(self, **kwargs) self.title = \"Commercial", "= Table(\"contacts\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CONTACTS\", (\"int\",)),", "50)), (\"ORGANISMS\", (\"char\", 200)), (\"TITLE\", (\"char\",800)), (\"AB_BIO\", (\"char\", 2)), (\"HAS_PLOT\",", "(\"char\", 500)), (\"LOCATION\", (\"char\", 250)), (\"DATE_STAMP\", (\"char\",)), ] engine.table =", "500)) ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"site.csv\")) # Create species", "datasets table table = Table(\"datasets\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "50)), ] engine.table = table engine.create_table() 
engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) # Create datasets", "engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) # Create site table table = Table(\"site\", delimiter=\",\", header_rows=0,", "engine=None, debug=False): Script.download(self, engine, debug) engine = self.engine original_sql_file =", "\"2.2.0\" self.urls = { \"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", } self.version = \"1.0.1\"", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) # Create biomass table table", "ImportError: from retriever.lib.scripts import open_fr, open_fw except ImportError: from retriever", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) # Create sample table table =", "[ (\"ID_DATASETS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"TAXA\", (\"char\", 50)), (\"ORGANISMS\", (\"char\",", "open_fr, open_fw except ImportError: from retriever import open_fr, open_fw, VERSION", "Fisheries Monthly Trade Data by Product, Country/Association\" self.name = \"biotimesql\"", "table.columns = [ (\"ID_CITATION1\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CITATION_LINE\", (\"char\",)), ]", "self.citation = \"<NAME>, <NAME>, <NAME>, et al. 
BioTIME: A database", "VERSION try: from retriever.lib.tools import open_fr, open_fw, open_csvw except ImportError:", "original_sql_file) sql_data = open_fr(self.engine.format_filename(original_sql_file)) set_open = False csv_writer = None", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SITE\", (\"int\",)), (\"STUDY_ID\", (\"int\",)),", "table table = Table(\"biomass\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"ID_BIOMASS\",", "None for line in sql_data: table_indicator = \"-- Table structure", "= Table(\"curation\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CURATION\", (\"int\",)),", "(\"char\", 150)), (\"EMAIL\", (\"char\", 150)), (\"COUNTRY\", (\"char\", 200)), (\"ROLE\", (\"char\",", "500)), (\"CONTACT_2\", (\"char\", 500)), (\"CONT_1_MAIL\", (\"char\", 60)), (\"CONT_2_MAIL\", (\"char\", 60)),", "(\"ID_ALL_RAW_DATA\", (\"int\",)), (\"ABUNDANCE\", (\"double\",)), (\"BIOMASS\", (\"double\",)), (\"ID_SPECIES\", (\"int\",)), (\"SAMPLE_DESC\", (\"char\",", "table_name current_file_open = current_file_process if set_open and not current_file_process ==", "= st.strip() current_file_process = table_name current_file_open = current_file_process if set_open", "Table(\"contacts\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CONTACTS\", (\"int\",)), (\"STUDY_ID\",", "(\"TREAT_COMMENTS\", (\"char\", 250)), (\"TREAT_DATE\", (\"char\", 100)), (\"CEN_LATITUDE\", (\"double\",)), (\"CEN_LONGITUDE\", (\"double\",)),", "table table = Table(\"downloads\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "from retriever.lib.templates import Script try: from retriever.lib.defaults import VERSION try:", "100)), (\"SPECIES\", (\"char\", 100)), (\"GENUS_SPECIES\", (\"char\", 100)) ] engine.table =", "species table table = Table(\"species\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "(\"int\",)), (\"STUDY_ID\", 
(\"int\",)), (\"CONTACT_1\", (\"char\", 500)), (\"CONTACT_2\", (\"char\", 500)), (\"CONT_1_MAIL\",", "table.columns = [ (\"ID_SAMPLE\", (\"int\",)), (\"ID_TREAT\", (\"int\",)), (\"SAMPLE_DESC_NAME\", (\"char\", 200)),", "200)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) # Create site", "(\"ID_TREAT\", (\"int\",)), (\"SAMPLE_DESC_NAME\", (\"char\", 200)), ] engine.table = table engine.create_table()", "(\"double\",)), (\"HABITAT\", (\"char\", 100)), (\"PROTECTED_AREA\", (\"char\", 50)), (\"AREA\", (\"double\",)), (\"BIOME_MAP\",", "(\"PURPOSE\", (\"char\", 500)), (\"LOCATION\", (\"char\", 250)), (\"DATE_STAMP\", (\"char\",)), ] engine.table", "Create contacts table table = Table(\"contacts\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "(\"char\", 500)), (\"CONT_1_MAIL\", (\"char\", 60)), (\"CONT_2_MAIL\", (\"char\", 60)), (\"LICENSE\", (\"char\",", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"site.csv\")) # Create species table table", "= \"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation = \"<NAME>, <NAME>, <NAME>, et al. 
BioTIME:", "current_file_open = current_file_process if set_open and not current_file_process == current_file_open:", "**kwargs) self.title = \"Commercial Fisheries Monthly Trade Data by Product,", "(\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"REALM\", (\"char\", 11)), (\"CLIMATE\", (\"char\", 20)), (\"GENERAL_TREAT\",", "if line.startswith(table_indicator): st = line[len(table_indicator):].replace(\"`\", \"\") table_name = st.strip() current_file_process", "(\"AB_BIO\", (\"char\", 2)), (\"HAS_PLOT\", (\"char\", 10)), (\"DATA_POINTS\", (\"char\",)), (\"START_YEAR\", (\"char\",)),", "(\"REALM\", (\"char\", 11)), (\"CLIMATE\", (\"char\", 20)), (\"GENERAL_TREAT\", (\"char\", 200)), (\"TREATMENT\",", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_ALL_RAW_DATA\", (\"int\",)), (\"ABUNDANCE\", (\"double\",)),", "parse_version(VERSION) <= parse_version(\"2.0.0\"): self.shortname = self.name self.name = self.title self.tags", "= Table(\"downloads\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"D_ID\", (\"int\",)),", "(\"PROTECTED_AREA\", (\"char\", 50)), (\"AREA\", (\"double\",)), (\"BIOME_MAP\", (\"char\", 500)) ] engine.table", "contacts table table = Table(\"contacts\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "[(\"COUNT_ID\", (\"int\",)), (\"COUNTRY_NAME\", (\"char\", 200))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\"))", "line in sql_data: table_indicator = \"-- Table structure for table", "(\"char\", 50)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) # Create", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) # Create contacts table table", "<= parse_version(\"2.0.0\"): self.shortname = self.name self.name = self.title self.tags =", "csv_file: csv_file.close() # Create abundance 
table table = Table(\"ID_ABUNDANCE\", delimiter=\",\",", "Create allrawdata table table = Table(\"allrawdata\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "(\"double\",)), (\"AB_TYPE\", (\"char\", )), (\"BIO_TYPE\", (\"char\",)), (\"SAMPLE_TYPE\", (\"char\",)), ] engine.table", "(\"SAMPLE_DESC_NAME\", (\"char\", 200)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) #", "(\"char\", 100)), (\"PROTECTED_AREA\", (\"char\", 50)), (\"AREA\", (\"double\",)), (\"BIOME_MAP\", (\"char\", 500))", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_ABUNDANCE\", (\"int\",)), (\"ABUNDANCE_TYPE\", (\"char\",", "ImportError: from retriever import open_fr, open_fw, VERSION class main(Script): def", "self.shortname = self.name self.name = self.title self.tags = self.keywords def", "csv.writer(csv_file, quoting=csv.QUOTE_ALL) set_open = True if line.startswith(\"INSERT INTO `{table_name}`\".format(table_name=table_name)): row_val", "Table(\"citation1\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CITATION1\", (\"int\",)), (\"STUDY_ID\",", "table_name = st.strip() current_file_process = table_name current_file_open = current_file_process if", "(\"int\",)), (\"COMMENTS\", (\"char\",)), (\"DATE_STUDY_ADDED\", (\"char\", 50)), ] engine.table = table", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) # Create curation table table =", "# Create allrawdata table table = Table(\"allrawdata\", delimiter=\",\", header_rows=0, contains_pk=False)", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) # Create datasets table table = Table(\"datasets\", delimiter=\",\",", "(\"CITATION_LINE\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) # Create", "(\"ID_CITATION1\", 
(\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CITATION_LINE\", (\"char\",)), ] engine.table = table", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"site.csv\")) # Create species table table = Table(\"species\",", "in table_rows: v = eval('[' + str(i_row) + ']') csv_writer.writerows([v])", "(\"BIOMASS_TYPE\", (\"char\", \"100\"))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) # Create", "= self.keywords def download(self, engine=None, debug=False): Script.download(self, engine, debug) engine", "8:-3] table_rows = row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for i_row in table_rows: v =", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) # Create site table table = Table(\"site\", delimiter=\",\",", "\"\") table_name = st.strip() current_file_process = table_name current_file_open = current_file_process", "(\"ID_SAMPLE\", (\"int\",)), (\"ID_TREAT\", (\"int\",)), (\"SAMPLE_DESC_NAME\", (\"char\", 200)), ] engine.table =", "\"CC BY 4.0\"}] self.encoding = \"latin1\" if parse_version(VERSION) <= parse_version(\"2.0.0\"):", "Table(\"methods\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_METHODS\", (\"int\",)), (\"STUDY_ID\",", "self.engine original_sql_file = \"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"], original_sql_file) sql_data = open_fr(self.engine.format_filename(original_sql_file)) set_open", "(\"char\", 50)), (\"AREA\", (\"double\",)), (\"BIOME_MAP\", (\"char\", 500)) ] engine.table =", "(\"int\",)), (\"TAXA\", (\"char\", 50)), (\"ORGANISMS\", (\"char\", 200)), (\"TITLE\", (\"char\",800)), (\"AB_BIO\",", "(\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) # Create downloads", "et al. 
BioTIME: A database of biodiversity time series for", "(\"CONT_2_MAIL\", (\"char\", 60)), (\"LICENSE\", (\"char\", 200)), (\"WEB_LINK\", (\"char\", 200)), (\"DATA_SOURCE\",", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) # Create citation1 table table = Table(\"citation1\", delimiter=\",\",", "download(self, engine=None, debug=False): Script.download(self, engine, debug) engine = self.engine original_sql_file", "(\"int\",)), (\"REALM\", (\"char\", 11)), (\"CLIMATE\", (\"char\", 20)), (\"GENERAL_TREAT\", (\"char\", 200)),", "Create site table table = Table(\"site\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "table.columns = [ (\"ID_SPECIES\", (\"int\",)), (\"GENUS\", (\"char\", 100)), (\"SPECIES\", (\"char\",", "VERSION class main(Script): def __init__(self, **kwargs): Script.__init__(self, **kwargs) self.title =", "else: out_file = \"{name}.csv\".format(name=table_name) csv_file = open_fw(engine.format_filename(out_file)) csv_writer = csv.writer(csv_file,", "allrawdata table table = Table(\"allrawdata\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "= self.engine original_sql_file = \"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"], original_sql_file) sql_data = open_fr(self.engine.format_filename(original_sql_file))", "(\"NAME\", (\"char\", 150)), (\"EMAIL\", (\"char\", 150)), (\"COUNTRY\", (\"char\", 200)), (\"ROLE\",", "engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) # Create allrawdata table table = Table(\"allrawdata\", delimiter=\",\", header_rows=0,", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) # Create countries table table", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) # Create downloads table table =", "(\"LOCATION\", (\"char\", 250)), (\"DATE_STAMP\", (\"char\",)), ] engine.table = table 
engine.create_table()", "\"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", } self.version = \"1.0.1\" self.ref = \"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation =", "[ (\"ID_SAMPLE\", (\"int\",)), (\"ID_TREAT\", (\"int\",)), (\"SAMPLE_DESC_NAME\", (\"char\", 200)), ] engine.table", "retriever.lib.scripts import open_fr, open_fw except ImportError: from retriever import open_fr,", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) # Create datasets table", "INTO `{table_name}`\".format(table_name=table_name)): row_val = line[line.index(\"VALUES (\") + 8:-3] table_rows =", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_METHODS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)),", "retriever.lib.defaults import VERSION try: from retriever.lib.tools import open_fr, open_fw, open_csvw", "Table structure for table \" if line.startswith(table_indicator): st = line[len(table_indicator):].replace(\"`\",", "if line.startswith(\"INSERT INTO `{table_name}`\".format(table_name=table_name)): row_val = line[line.index(\"VALUES (\") + 8:-3]", "contains_pk=False) table.columns = [ (\"ID_SPECIES\", (\"int\",)), (\"GENUS\", (\"char\", 100)), (\"SPECIES\",", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) # Create curation table table = Table(\"curation\", delimiter=\",\",", "(\"TAXA\", (\"char\", 50)), (\"ORGANISMS\", (\"char\", 200)), (\"TITLE\", (\"char\",800)), (\"AB_BIO\", (\"char\",", "csv_writer.writerows([v]) if csv_file: csv_file.close() # Create abundance table table =", "eval('[' + str(i_row) + ']') csv_writer.writerows([v]) if csv_file: csv_file.close() #", "Create biomass table table = Table(\"biomass\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "# Create sample table table = Table(\"sample\", delimiter=\",\", header_rows=0, contains_pk=False)", "(\"double\",)), 
(\"DAY\", (\"int\",)), (\"MONTH\", (\"int\",)), (\"YEAR\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), ]", "(\"AB_TYPE\", (\"char\", )), (\"BIO_TYPE\", (\"char\",)), (\"SAMPLE_TYPE\", (\"char\",)), ] engine.table =", "<NAME>, et al. BioTIME: A database of biodiversity time series", "(\"COUNTRY_NAME\", (\"char\", 200))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) # Create", "out_file = \"{name}.csv\".format(name=table_name) csv_file = open_fw(engine.format_filename(out_file)) csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)", "table table = Table(\"datasets\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "[\"Time series\", \"Anthropocene\", \"Global\"] self.licenses = [{\"name\": \"CC BY 4.0\"}]", "debug) engine = self.engine original_sql_file = \"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"], original_sql_file) sql_data", "table.columns = [ (\"ID_METHODS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"METHODS\", (\"char\",)), (\"SUMMARY_METHODS\",", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) # Create citation1 table table = Table(\"citation1\",", "= \"<NAME>, <NAME>, <NAME>, et al. 
BioTIME: A database of", "(\"HABITAT\", (\"char\", 100)), (\"PROTECTED_AREA\", (\"char\", 50)), (\"AREA\", (\"double\",)), (\"BIOME_MAP\", (\"char\",", "\"biotimesql\" self.retriever_minimum_version = \"2.2.0\" self.urls = { \"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", }", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_CITATION1\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CITATION_LINE\",", "(\"NUMBER_OF_SAMPLES\", (\"char\",)), (\"NUMBER_LAT_LONG\", (\"char\",)), (\"TOTAL\", (\"char\",)), (\"GRAIN_SIZE_TEXT\", (\"char\",)), (\"GRAIN_SQ_KM\", (\"double\",)),", "(\"ID_ABUNDANCE\", (\"int\",)), (\"ABUNDANCE_TYPE\", (\"char\", \"100\")), ] engine.table = table engine.create_table()", "[ (\"ID_ALL_RAW_DATA\", (\"int\",)), (\"ABUNDANCE\", (\"double\",)), (\"BIOMASS\", (\"double\",)), (\"ID_SPECIES\", (\"int\",)), (\"SAMPLE_DESC\",", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) # Create biomass table table =", "self.name = self.title self.tags = self.keywords def download(self, engine=None, debug=False):", "(\"COUNTRY\", (\"char\", 200)), (\"ROLE\", (\"char\", 150)), (\"PURPOSE\", (\"char\", 500)), (\"LOCATION\",", "table = Table(\"ID_ABUNDANCE\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_ABUNDANCE\",", "engine = self.engine original_sql_file = \"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"], original_sql_file) sql_data =", "self.encoding = \"latin1\" if parse_version(VERSION) <= parse_version(\"2.0.0\"): self.shortname = self.name", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) # Create allrawdata table table = Table(\"allrawdata\",", "self.retriever_minimum_version = \"2.2.0\" self.urls = { \"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", } self.version", "(\"char\", 200)), ] engine.table = 
table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) # Create", "site table table = Table(\"site\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "100)) ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"species.csv\")) SCRIPT = main()", "pkg_resources import parse_version from retriever.lib.models import Table from retriever.lib.templates import", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"D_ID\", (\"int\",)), (\"STUDY\", (\"char\",", "contains_pk=False) table.columns = [ (\"ID_CITATION1\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CITATION_LINE\", (\"char\",)),", "(\"double\",)), (\"AREA_SQ_KM\", (\"double\",)), (\"AB_TYPE\", (\"char\", )), (\"BIO_TYPE\", (\"char\",)), (\"SAMPLE_TYPE\", (\"char\",)),", "(\"BIOMASS\", (\"double\",)), (\"ID_SPECIES\", (\"int\",)), (\"SAMPLE_DESC\", (\"char\", 200)), (\"PLOT\", (\"char\", 150)),", "str(i_row) + ']') csv_writer.writerows([v]) if csv_file: csv_file.close() # Create abundance", "table = Table(\"downloads\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"D_ID\",", "open_fw, open_csvw except ImportError: from retriever.lib.scripts import open_fr, open_fw except", "(\"MONTH\", (\"int\",)), (\"YEAR\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), ] engine.table = table", "row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for i_row in table_rows: v = eval('[' + str(i_row)", "\" if line.startswith(table_indicator): st = line[len(table_indicator):].replace(\"`\", \"\") table_name = st.strip()", "(\"char\", 200)), (\"PLOT\", (\"char\", 150)), (\"LATITUDE\", (\"double\",)), (\"LONGITUDE\", (\"double\",)), (\"DEPTH\",", "(\"START_YEAR\", (\"char\",)), (\"END_YEAR\", (\"char\",)), (\"CENT_LAT\", (\"double\",)), (\"CENT_LONG\", (\"double\",)), (\"NUMBER_OF_SPECIES\", (\"char\",)),", "(\"char\", 100)) ] engine.table = table engine.create_table() 
engine.insert_data_from_file(engine.format_filename(\"species.csv\")) SCRIPT =", "# -*- coding: utf-8 -*- #retriever import csv from pkg_resources", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"COUNT_ID\", (\"int\",)), (\"COUNTRY_NAME\", (\"char\", 200))]", "from retriever.lib.models import Table from retriever.lib.templates import Script try: from", "= [ (\"ID_CURATION\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"LINK_ID\", (\"int\",)), (\"COMMENTS\", (\"char\",)),", "Biogeography. 2018; 00:1 - 26. https://doi.org/10.1111/geb.12729.\" self.description = \"The BioTIME", "set_open = False else: out_file = \"{name}.csv\".format(name=table_name) csv_file = open_fw(engine.format_filename(out_file))", "Create methods table table = Table(\"methods\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) # Create citation1 table table =", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) # Create curation table table", "(\"char\", 150)), (\"COUNTRY\", (\"char\", 200)), (\"ROLE\", (\"char\", 150)), (\"PURPOSE\", (\"char\",", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) # Create allrawdata table table", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_ABUNDANCE\", (\"int\",)), (\"ABUNDANCE_TYPE\", (\"char\", \"100\")),", "(\"PLOT\", (\"char\", 150)), (\"LATITUDE\", (\"double\",)), (\"LONGITUDE\", (\"double\",)), (\"DEPTH\", (\"double\",)), (\"DAY\",", "= [{\"name\": \"CC BY 4.0\"}] self.encoding = \"latin1\" if parse_version(VERSION)", "False csv_writer = None csv_file = None table_name = None", "current_file_process == current_file_open: csv_file.close() set_open = False else: out_file =", "Create datasets table table = Table(\"datasets\", delimiter=\",\", header_rows=0, 
contains_pk=False) table.columns", "= [ (\"ID_CONTACTS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CONTACT_1\", (\"char\", 500)), (\"CONTACT_2\",", "open_fr, open_fw, VERSION class main(Script): def __init__(self, **kwargs): Script.__init__(self, **kwargs)", "# Create countries table table = Table(\"countries\", delimiter=\",\", header_rows=0, contains_pk=False)", "coding: utf-8 -*- #retriever import csv from pkg_resources import parse_version", "table.columns = [(\"COUNT_ID\", (\"int\",)), (\"COUNTRY_NAME\", (\"char\", 200))] engine.table = table", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) # Create contacts table table =", "100)), (\"CEN_LATITUDE\", (\"double\",)), (\"CEN_LONGITUDE\", (\"double\",)), (\"HABITAT\", (\"char\", 100)), (\"PROTECTED_AREA\", (\"char\",", "\"Commercial Fisheries Monthly Trade Data by Product, Country/Association\" self.name =", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) # Create countries table table =", "(\"SUMMARY_METHODS\", (\"char\", 500)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) #", "database of biodiversity time series for the Anthropocene. 
Global Ecology", "Table(\"sample\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SAMPLE\", (\"int\",)), (\"ID_TREAT\",", "Product, Country/Association\" self.name = \"biotimesql\" self.retriever_minimum_version = \"2.2.0\" self.urls =", "Trade Data by Product, Country/Association\" self.name = \"biotimesql\" self.retriever_minimum_version =", "= \"Commercial Fisheries Monthly Trade Data by Product, Country/Association\" self.name", "table table = Table(\"methods\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) # Create allrawdata table table = Table(\"allrawdata\", delimiter=\",\",", "table.columns = [ (\"ID_DATASETS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"TAXA\", (\"char\", 50)),", "(\"double\",)), (\"NUMBER_OF_SPECIES\", (\"char\",)), (\"NUMBER_OF_SAMPLES\", (\"char\",)), (\"NUMBER_LAT_LONG\", (\"char\",)), (\"TOTAL\", (\"char\",)), (\"GRAIN_SIZE_TEXT\",", "(\"ID_CURATION\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"LINK_ID\", (\"int\",)), (\"COMMENTS\", (\"char\",)), (\"DATE_STUDY_ADDED\", (\"char\",", "(\"METHODS\", (\"char\",)), (\"SUMMARY_METHODS\", (\"char\", 500)), ] engine.table = table engine.create_table()", "time series for the Anthropocene. Global Ecology & Biogeography. 
2018;", "2)), (\"HAS_PLOT\", (\"char\", 10)), (\"DATA_POINTS\", (\"char\",)), (\"START_YEAR\", (\"char\",)), (\"END_YEAR\", (\"char\",)),", "(\"int\",)), (\"CITATION_LINE\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) #", "csv_writer = None csv_file = None table_name = None NULL", "= Table(\"biomass\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"ID_BIOMASS\", (\"int\",)), (\"BIOMASS_TYPE\",", "line[line.index(\"VALUES (\") + 8:-3] table_rows = row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for i_row in", "(\"int\",)), (\"STUDY\", (\"char\", 25)), (\"NAME\", (\"char\", 150)), (\"EMAIL\", (\"char\", 150)),", "(\"char\", 200)), (\"ROLE\", (\"char\", 150)), (\"PURPOSE\", (\"char\", 500)), (\"LOCATION\", (\"char\",", "(\"char\", 10)), (\"DATA_POINTS\", (\"char\",)), (\"START_YEAR\", (\"char\",)), (\"END_YEAR\", (\"char\",)), (\"CENT_LAT\", (\"double\",)),", "series\", \"Anthropocene\", \"Global\"] self.licenses = [{\"name\": \"CC BY 4.0\"}] self.encoding", "= None table_name = None NULL = None for line", "table_name = None NULL = None for line in sql_data:", "= [ (\"ID_DATASETS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"TAXA\", (\"char\", 50)), (\"ORGANISMS\",", "(\"DATE_STUDY_ADDED\", (\"char\", 50)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) #", "(\"STUDY\", (\"char\", 25)), (\"NAME\", (\"char\", 150)), (\"EMAIL\", (\"char\", 150)), (\"COUNTRY\",", "line.startswith(table_indicator): st = line[len(table_indicator):].replace(\"`\", \"\") table_name = st.strip() current_file_process =", "Table(\"downloads\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"D_ID\", (\"int\",)), (\"STUDY\",", "import open_fr, open_fw, open_csvw except ImportError: from retriever.lib.scripts import open_fr,", "(\"char\", 11)), (\"CLIMATE\", (\"char\", 20)), (\"GENERAL_TREAT\", 
(\"char\", 200)), (\"TREATMENT\", (\"char\",", "(\"DATA_POINTS\", (\"char\",)), (\"START_YEAR\", (\"char\",)), (\"END_YEAR\", (\"char\",)), (\"CENT_LAT\", (\"double\",)), (\"CENT_LONG\", (\"double\",)),", "00:1 - 26. https://doi.org/10.1111/geb.12729.\" self.description = \"The BioTIME database has", "(\"int\",)), (\"SAMPLE_DESC_NAME\", (\"char\", 200)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\"))", "= None csv_file = None table_name = None NULL =", "200)), (\"ROLE\", (\"char\", 150)), (\"PURPOSE\", (\"char\", 500)), (\"LOCATION\", (\"char\", 250)),", "(\"double\",)), (\"ID_SPECIES\", (\"int\",)), (\"SAMPLE_DESC\", (\"char\", 200)), (\"PLOT\", (\"char\", 150)), (\"LATITUDE\",", "contains_pk=False) table.columns = [ (\"ID_CURATION\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"LINK_ID\", (\"int\",)),", "(\"char\",)), (\"GRAIN_SIZE_TEXT\", (\"char\",)), (\"GRAIN_SQ_KM\", (\"double\",)), (\"AREA_SQ_KM\", (\"double\",)), (\"AB_TYPE\", (\"char\", )),", "Table(\"curation\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CURATION\", (\"int\",)), (\"STUDY_ID\",", "(\"ORGANISMS\", (\"char\", 200)), (\"TITLE\", (\"char\",800)), (\"AB_BIO\", (\"char\", 2)), (\"HAS_PLOT\", (\"char\",", "retriever.lib.templates import Script try: from retriever.lib.defaults import VERSION try: from", "(\"char\",)), (\"START_YEAR\", (\"char\",)), (\"END_YEAR\", (\"char\",)), (\"CENT_LAT\", (\"double\",)), (\"CENT_LONG\", (\"double\",)), (\"NUMBER_OF_SPECIES\",", "(\"int\",)), (\"SAMPLE_DESC\", (\"char\", 200)), (\"PLOT\", (\"char\", 150)), (\"LATITUDE\", (\"double\",)), (\"LONGITUDE\",", "= current_file_process if set_open and not current_file_process == current_file_open: csv_file.close()", "[ (\"ID_CONTACTS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CONTACT_1\", (\"char\", 500)), (\"CONTACT_2\", (\"char\",", "= table engine.create_table() 
engine.insert_data_from_file(engine.format_filename(\"site.csv\")) # Create species table table =", "= [ (\"ID_ALL_RAW_DATA\", (\"int\",)), (\"ABUNDANCE\", (\"double\",)), (\"BIOMASS\", (\"double\",)), (\"ID_SPECIES\", (\"int\",)),", "biodiversity time series for the Anthropocene. Global Ecology & Biogeography.", "table.columns = [ (\"ID_ABUNDANCE\", (\"int\",)), (\"ABUNDANCE_TYPE\", (\"char\", \"100\")), ] engine.table", "(\"BIO_TYPE\", (\"char\",)), (\"SAMPLE_TYPE\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\"))", "20)), (\"GENERAL_TREAT\", (\"char\", 200)), (\"TREATMENT\", (\"char\", 200)), (\"TREAT_COMMENTS\", (\"char\", 250)),", "except ImportError: from retriever.lib.scripts import open_fr, open_fw except ImportError: from", "engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) # Create datasets table table = Table(\"datasets\", delimiter=\",\", header_rows=0,", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) # Create downloads table table = Table(\"downloads\", delimiter=\",\",", "4.0\"}] self.encoding = \"latin1\" if parse_version(VERSION) <= parse_version(\"2.0.0\"): self.shortname =", "citation1 table table = Table(\"citation1\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "\"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", } self.version = \"1.0.1\" self.ref = \"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation", "- 26. 
https://doi.org/10.1111/geb.12729.\" self.description = \"The BioTIME database has species", "has species identities and abundances in ecological assemblages through time.\"", "contains_pk=False) table.columns = [ (\"ID_ALL_RAW_DATA\", (\"int\",)), (\"ABUNDANCE\", (\"double\",)), (\"BIOMASS\", (\"double\",)),", "= \"latin1\" if parse_version(VERSION) <= parse_version(\"2.0.0\"): self.shortname = self.name self.name", "(\"TREATMENT\", (\"char\", 200)), (\"TREAT_COMMENTS\", (\"char\", 250)), (\"TREAT_DATE\", (\"char\", 100)), (\"CEN_LATITUDE\",", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) # Create site table table", "\"Global\"] self.licenses = [{\"name\": \"CC BY 4.0\"}] self.encoding = \"latin1\"", "(\"char\",)), (\"SAMPLE_TYPE\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) #", "(\"SPECIES\", (\"char\", 100)), (\"GENUS_SPECIES\", (\"char\", 100)) ] engine.table = table", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) # Create downloads table", "quoting=csv.QUOTE_ALL) set_open = True if line.startswith(\"INSERT INTO `{table_name}`\".format(table_name=table_name)): row_val =", "150)), (\"PURPOSE\", (\"char\", 500)), (\"LOCATION\", (\"char\", 250)), (\"DATE_STAMP\", (\"char\",)), ]", "table table = Table(\"site\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "for table \" if line.startswith(table_indicator): st = line[len(table_indicator):].replace(\"`\", \"\") table_name", "**kwargs): Script.__init__(self, **kwargs) self.title = \"Commercial Fisheries Monthly Trade Data", "= Table(\"sample\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SAMPLE\", (\"int\",)),", "= None for line in sql_data: table_indicator = \"-- Table", "time.\" self.keywords = [\"Time series\", \"Anthropocene\", 
\"Global\"] self.licenses = [{\"name\":", "(\"COMMENTS\", (\"char\",)), (\"DATE_STUDY_ADDED\", (\"char\", 50)), ] engine.table = table engine.create_table()", "table = Table(\"citation1\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CITATION1\",", "= table_name current_file_open = current_file_process if set_open and not current_file_process", "Table(\"countries\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"COUNT_ID\", (\"int\",)), (\"COUNTRY_NAME\", (\"char\",", "(\"CONT_1_MAIL\", (\"char\", 60)), (\"CONT_2_MAIL\", (\"char\", 60)), (\"LICENSE\", (\"char\", 200)), (\"WEB_LINK\",", "250)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) # Create countries", "False else: out_file = \"{name}.csv\".format(name=table_name) csv_file = open_fw(engine.format_filename(out_file)) csv_writer =", "= \"1.0.1\" self.ref = \"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation = \"<NAME>, <NAME>, <NAME>,", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_ALL_RAW_DATA\", (\"int\",)), (\"ABUNDANCE\", (\"double\",)), (\"BIOMASS\",", "(\"char\",)), (\"SUMMARY_METHODS\", (\"char\", 500)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\"))", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) # Create site table table = Table(\"site\",", "= [ (\"ID_SPECIES\", (\"int\",)), (\"GENUS\", (\"char\", 100)), (\"SPECIES\", (\"char\", 100)),", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) # Create biomass table", "(\"YEAR\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\"))", "# Create abundance table table = Table(\"ID_ABUNDANCE\", delimiter=\",\", header_rows=0, 
contains_pk=False)", "\"<NAME>, <NAME>, <NAME>, et al. BioTIME: A database of biodiversity", "current_file_process = table_name current_file_open = current_file_process if set_open and not", "25)), (\"NAME\", (\"char\", 150)), (\"EMAIL\", (\"char\", 150)), (\"COUNTRY\", (\"char\", 200)),", "table = Table(\"site\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SITE\",", "[ (\"ID_ABUNDANCE\", (\"int\",)), (\"ABUNDANCE_TYPE\", (\"char\", \"100\")), ] engine.table = table", "\"Anthropocene\", \"Global\"] self.licenses = [{\"name\": \"CC BY 4.0\"}] self.encoding =", "# Create citation1 table table = Table(\"citation1\", delimiter=\",\", header_rows=0, contains_pk=False)", "# Create datasets table table = Table(\"datasets\", delimiter=\",\", header_rows=0, contains_pk=False)", "= Table(\"species\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SPECIES\", (\"int\",)),", "(\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) # Create methods", "\"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation = \"<NAME>, <NAME>, <NAME>, et al. 
BioTIME: A", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) # Create allrawdata table table =", "(\"double\",)), (\"BIOMASS\", (\"double\",)), (\"ID_SPECIES\", (\"int\",)), (\"SAMPLE_DESC\", (\"char\", 200)), (\"PLOT\", (\"char\",", "= [ (\"ID_ABUNDANCE\", (\"int\",)), (\"ABUNDANCE_TYPE\", (\"char\", \"100\")), ] engine.table =", "(\"LICENSE\", (\"char\", 200)), (\"WEB_LINK\", (\"char\", 200)), (\"DATA_SOURCE\", (\"char\", 250)), ]", "[ (\"ID_METHODS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"METHODS\", (\"char\",)), (\"SUMMARY_METHODS\", (\"char\", 500)),", "(\"int\",)), (\"METHODS\", (\"char\",)), (\"SUMMARY_METHODS\", (\"char\", 500)), ] engine.table = table", "(\"ID_SITE\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"REALM\", (\"char\", 11)), (\"CLIMATE\", (\"char\", 20)),", "} self.version = \"1.0.1\" self.ref = \"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation = \"<NAME>,", "sql_data: table_indicator = \"-- Table structure for table \" if", "[{\"name\": \"CC BY 4.0\"}] self.encoding = \"latin1\" if parse_version(VERSION) <=", "from retriever.lib.defaults import VERSION try: from retriever.lib.tools import open_fr, open_fw,", "contains_pk=False) table.columns = [ (\"ID_ABUNDANCE\", (\"int\",)), (\"ABUNDANCE_TYPE\", (\"char\", \"100\")), ]", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) # Create sample table", "line[len(table_indicator):].replace(\"`\", \"\") table_name = st.strip() current_file_process = table_name current_file_open =", "(\"char\", 150)), (\"PURPOSE\", (\"char\", 500)), (\"LOCATION\", (\"char\", 250)), (\"DATE_STAMP\", (\"char\",)),", "= self.name self.name = self.title self.tags = self.keywords def download(self,", "= csv.writer(csv_file, quoting=csv.QUOTE_ALL) set_open = True if line.startswith(\"INSERT INTO `{table_name}`\".format(table_name=table_name)):", "engine.table = table 
engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) # Create citation1 table table", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_SPECIES\", (\"int\",)), (\"GENUS\", (\"char\", 100)),", "(\"double\",)), (\"DEPTH\", (\"double\",)), (\"DAY\", (\"int\",)), (\"MONTH\", (\"int\",)), (\"YEAR\", (\"int\",)), (\"STUDY_ID\",", "= \"biotimesql\" self.retriever_minimum_version = \"2.2.0\" self.urls = { \"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\",", "contains_pk=False) table.columns = [(\"COUNT_ID\", (\"int\",)), (\"COUNTRY_NAME\", (\"char\", 200))] engine.table =", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_METHODS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"METHODS\",", "by Product, Country/Association\" self.name = \"biotimesql\" self.retriever_minimum_version = \"2.2.0\" self.urls", "from retriever import open_fr, open_fw, VERSION class main(Script): def __init__(self,", "open_fw except ImportError: from retriever import open_fr, open_fw, VERSION class", "(\"TREAT_DATE\", (\"char\", 100)), (\"CEN_LATITUDE\", (\"double\",)), (\"CEN_LONGITUDE\", (\"double\",)), (\"HABITAT\", (\"char\", 100)),", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"site.csv\")) # Create species table", "and not current_file_process == current_file_open: csv_file.close() set_open = False else:", "-*- #retriever import csv from pkg_resources import parse_version from retriever.lib.models", "table.columns = [(\"ID_BIOMASS\", (\"int\",)), (\"BIOMASS_TYPE\", (\"char\", \"100\"))] engine.table = table", "(\"STUDY_ID\", (\"int\",)), (\"CONTACT_1\", (\"char\", 500)), (\"CONTACT_2\", (\"char\", 500)), (\"CONT_1_MAIL\", (\"char\",", "row_val = line[line.index(\"VALUES (\") + 8:-3] table_rows = row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for", "species identities and abundances in ecological assemblages through time.\" 
self.keywords", "table table = Table(\"allrawdata\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "contains_pk=False) table.columns = [ (\"ID_CONTACTS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CONTACT_1\", (\"char\",", "(\"char\",800)), (\"AB_BIO\", (\"char\", 2)), (\"HAS_PLOT\", (\"char\", 10)), (\"DATA_POINTS\", (\"char\",)), (\"START_YEAR\",", "(\"SAMPLE_TYPE\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) # Create", "= Table(\"methods\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_METHODS\", (\"int\",)),", "(\"END_YEAR\", (\"char\",)), (\"CENT_LAT\", (\"double\",)), (\"CENT_LONG\", (\"double\",)), (\"NUMBER_OF_SPECIES\", (\"char\",)), (\"NUMBER_OF_SAMPLES\", (\"char\",)),", "= \"-- Table structure for table \" if line.startswith(table_indicator): st", "= [ (\"ID_METHODS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"METHODS\", (\"char\",)), (\"SUMMARY_METHODS\", (\"char\",", "(\"char\",)), (\"CENT_LAT\", (\"double\",)), (\"CENT_LONG\", (\"double\",)), (\"NUMBER_OF_SPECIES\", (\"char\",)), (\"NUMBER_OF_SAMPLES\", (\"char\",)), (\"NUMBER_LAT_LONG\",", "(\"ID_CONTACTS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CONTACT_1\", (\"char\", 500)), (\"CONTACT_2\", (\"char\", 500)),", "(\"char\",)), (\"END_YEAR\", (\"char\",)), (\"CENT_LAT\", (\"double\",)), (\"CENT_LONG\", (\"double\",)), (\"NUMBER_OF_SPECIES\", (\"char\",)), (\"NUMBER_OF_SAMPLES\",", "self.version = \"1.0.1\" self.ref = \"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation = \"<NAME>, <NAME>,", "(\"int\",)), (\"LINK_ID\", (\"int\",)), (\"COMMENTS\", (\"char\",)), (\"DATE_STUDY_ADDED\", (\"char\", 50)), ] engine.table", "(\"int\",)), (\"BIOMASS_TYPE\", (\"char\", \"100\"))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) #", "+ ']') csv_writer.writerows([v]) if csv_file: 
csv_file.close() # Create abundance table", "60)), (\"CONT_2_MAIL\", (\"char\", 60)), (\"LICENSE\", (\"char\", 200)), (\"WEB_LINK\", (\"char\", 200)),", "Create downloads table table = Table(\"downloads\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "Anthropocene. Global Ecology & Biogeography. 2018; 00:1 - 26. https://doi.org/10.1111/geb.12729.\"", "Create citation1 table table = Table(\"citation1\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "import Table from retriever.lib.templates import Script try: from retriever.lib.defaults import", "+ 8:-3] table_rows = row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for i_row in table_rows: v", "(\"char\", 25)), (\"NAME\", (\"char\", 150)), (\"EMAIL\", (\"char\", 150)), (\"COUNTRY\", (\"char\",", "table = Table(\"species\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SPECIES\",", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) # Create site table", "(\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"LINK_ID\", (\"int\",)), (\"COMMENTS\", (\"char\",)), (\"DATE_STUDY_ADDED\", (\"char\", 50)),", "{ \"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", } self.version = \"1.0.1\" self.ref = \"https://zenodo.org/record/1095628#.WskN7dPwYyn\"", "database has species identities and abundances in ecological assemblages through", "csv_file.close() # Create abundance table table = Table(\"ID_ABUNDANCE\", delimiter=\",\", header_rows=0,", "table = Table(\"methods\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_METHODS\",", "(\"char\", 60)), (\"CONT_2_MAIL\", (\"char\", 60)), (\"LICENSE\", (\"char\", 200)), (\"WEB_LINK\", (\"char\",", "for the Anthropocene. Global Ecology & Biogeography. 
2018; 00:1 -", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) # Create datasets table table =", "(\"ID_SPECIES\", (\"int\",)), (\"GENUS\", (\"char\", 100)), (\"SPECIES\", (\"char\", 100)), (\"GENUS_SPECIES\", (\"char\",", "150)), (\"EMAIL\", (\"char\", 150)), (\"COUNTRY\", (\"char\", 200)), (\"ROLE\", (\"char\", 150)),", "(\"ROLE\", (\"char\", 150)), (\"PURPOSE\", (\"char\", 500)), (\"LOCATION\", (\"char\", 250)), (\"DATE_STAMP\",", "(\"int\",)), (\"MONTH\", (\"int\",)), (\"YEAR\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), ] engine.table =", "table = Table(\"sample\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SAMPLE\",", "(\"GRAIN_SQ_KM\", (\"double\",)), (\"AREA_SQ_KM\", (\"double\",)), (\"AB_TYPE\", (\"char\", )), (\"BIO_TYPE\", (\"char\",)), (\"SAMPLE_TYPE\",", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CURATION\", (\"int\",)), (\"STUDY_ID\", (\"int\",)),", "engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) # Create sample table table = Table(\"sample\", delimiter=\",\", header_rows=0,", "abundance table table = Table(\"ID_ABUNDANCE\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "\"100\")), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) # Create allrawdata", "csv from pkg_resources import parse_version from retriever.lib.models import Table from", "(\"char\", 150)), (\"LATITUDE\", (\"double\",)), (\"LONGITUDE\", (\"double\",)), (\"DEPTH\", (\"double\",)), (\"DAY\", (\"int\",)),", "11)), (\"CLIMATE\", (\"char\", 20)), (\"GENERAL_TREAT\", (\"char\", 200)), (\"TREATMENT\", (\"char\", 200)),", "[(\"ID_BIOMASS\", (\"int\",)), (\"BIOMASS_TYPE\", (\"char\", \"100\"))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\"))", "(\"EMAIL\", (\"char\", 150)), (\"COUNTRY\", (\"char\", 
200)), (\"ROLE\", (\"char\", 150)), (\"PURPOSE\",", "contains_pk=False) table.columns = [ (\"ID_DATASETS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"TAXA\", (\"char\",", "Ecology & Biogeography. 2018; 00:1 - 26. https://doi.org/10.1111/geb.12729.\" self.description =", "(\"char\", 200)), (\"DATA_SOURCE\", (\"char\", 250)), ] engine.table = table engine.create_table()", "current_file_open: csv_file.close() set_open = False else: out_file = \"{name}.csv\".format(name=table_name) csv_file", "# Create methods table table = Table(\"methods\", delimiter=\",\", header_rows=0, contains_pk=False)", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) # Create methods table table = Table(\"methods\",", "self.keywords def download(self, engine=None, debug=False): Script.download(self, engine, debug) engine =", "(\"int\",)), (\"YEAR\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), ] engine.table = table engine.create_table()", "(\"char\", 500)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) # Create", "(\"LATITUDE\", (\"double\",)), (\"LONGITUDE\", (\"double\",)), (\"DEPTH\", (\"double\",)), (\"DAY\", (\"int\",)), (\"MONTH\", (\"int\",)),", "Create sample table table = Table(\"sample\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) # Create datasets table table", "None NULL = None for line in sql_data: table_indicator =", "(\"ID_SPECIES\", (\"int\",)), (\"SAMPLE_DESC\", (\"char\", 200)), (\"PLOT\", (\"char\", 150)), (\"LATITUDE\", (\"double\",)),", "(\"int\",)), (\"ABUNDANCE\", (\"double\",)), (\"BIOMASS\", (\"double\",)), (\"ID_SPECIES\", (\"int\",)), (\"SAMPLE_DESC\", (\"char\", 200)),", "(\"STUDY_ID\", (\"int\",)), (\"TAXA\", (\"char\", 50)), (\"ORGANISMS\", (\"char\", 200)), (\"TITLE\", (\"char\",800)),", "(\"CEN_LATITUDE\", 
(\"double\",)), (\"CEN_LONGITUDE\", (\"double\",)), (\"HABITAT\", (\"char\", 100)), (\"PROTECTED_AREA\", (\"char\", 50)),", "(\"int\",)), (\"ABUNDANCE_TYPE\", (\"char\", \"100\")), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\"))", "the Anthropocene. Global Ecology & Biogeography. 2018; 00:1 - 26.", "(\"GRAIN_SIZE_TEXT\", (\"char\",)), (\"GRAIN_SQ_KM\", (\"double\",)), (\"AREA_SQ_KM\", (\"double\",)), (\"AB_TYPE\", (\"char\", )), (\"BIO_TYPE\",", "engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) # Create methods table table = Table(\"methods\", delimiter=\",\", header_rows=0,", "engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) # Create curation table table = Table(\"curation\", delimiter=\",\", header_rows=0,", "= { \"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", } self.version = \"1.0.1\" self.ref =", "contains_pk=False) table.columns = [(\"ID_BIOMASS\", (\"int\",)), (\"BIOMASS_TYPE\", (\"char\", \"100\"))] engine.table =", "retriever.lib.tools import open_fr, open_fw, open_csvw except ImportError: from retriever.lib.scripts import", "Table from retriever.lib.templates import Script try: from retriever.lib.defaults import VERSION", "Table(\"allrawdata\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_ALL_RAW_DATA\", (\"int\",)), (\"ABUNDANCE\",", "(\"int\",)), (\"CONTACT_1\", (\"char\", 500)), (\"CONTACT_2\", (\"char\", 500)), (\"CONT_1_MAIL\", (\"char\", 60)),", "BioTIME database has species identities and abundances in ecological assemblages", "500)), (\"CONT_1_MAIL\", (\"char\", 60)), (\"CONT_2_MAIL\", (\"char\", 60)), (\"LICENSE\", (\"char\", 200)),", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"curation.csv\")) # Create datasets table table = Table(\"datasets\",", "methods table table = Table(\"methods\", delimiter=\",\", header_rows=0, 
contains_pk=False) table.columns =", "header_rows=0, contains_pk=False) table.columns = [(\"ID_BIOMASS\", (\"int\",)), (\"BIOMASS_TYPE\", (\"char\", \"100\"))] engine.table", "\"-- Table structure for table \" if line.startswith(table_indicator): st =", "(\"char\", 60)), (\"LICENSE\", (\"char\", 200)), (\"WEB_LINK\", (\"char\", 200)), (\"DATA_SOURCE\", (\"char\",", "(\"TOTAL\", (\"char\",)), (\"GRAIN_SIZE_TEXT\", (\"char\",)), (\"GRAIN_SQ_KM\", (\"double\",)), (\"AREA_SQ_KM\", (\"double\",)), (\"AB_TYPE\", (\"char\",", "(\"int\",)), (\"ID_TREAT\", (\"int\",)), (\"SAMPLE_DESC_NAME\", (\"char\", 200)), ] engine.table = table", "= line[len(table_indicator):].replace(\"`\", \"\") table_name = st.strip() current_file_process = table_name current_file_open", "(\"STUDY_ID\", (\"int\",)), (\"LINK_ID\", (\"int\",)), (\"COMMENTS\", (\"char\",)), (\"DATE_STUDY_ADDED\", (\"char\", 50)), ]", "[ (\"ID_CURATION\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"LINK_ID\", (\"int\",)), (\"COMMENTS\", (\"char\",)), (\"DATE_STUDY_ADDED\",", "-*- coding: utf-8 -*- #retriever import csv from pkg_resources import", "engine.download_file(self.urls[\"sql_file\"], original_sql_file) sql_data = open_fr(self.engine.format_filename(original_sql_file)) set_open = False csv_writer =", "biomass table table = Table(\"biomass\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "Table(\"species\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SPECIES\", (\"int\",)), (\"GENUS\",", "(\"ABUNDANCE\", (\"double\",)), (\"BIOMASS\", (\"double\",)), (\"ID_SPECIES\", (\"int\",)), (\"SAMPLE_DESC\", (\"char\", 200)), (\"PLOT\",", "open_fr(self.engine.format_filename(original_sql_file)) set_open = False csv_writer = None csv_file = None", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) # Create biomass table table = Table(\"biomass\", delimiter=\",\",", "(\"char\", 200)), (\"TITLE\", (\"char\",800)), (\"AB_BIO\", 
(\"char\", 2)), (\"HAS_PLOT\", (\"char\", 10)),", "table.columns = [ (\"D_ID\", (\"int\",)), (\"STUDY\", (\"char\", 25)), (\"NAME\", (\"char\",", "try: from retriever.lib.defaults import VERSION try: from retriever.lib.tools import open_fr,", "utf-8 -*- #retriever import csv from pkg_resources import parse_version from", "500)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) # Create sample", "main(Script): def __init__(self, **kwargs): Script.__init__(self, **kwargs) self.title = \"Commercial Fisheries", "== current_file_open: csv_file.close() set_open = False else: out_file = \"{name}.csv\".format(name=table_name)", "(\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) # Create contacts", "for line in sql_data: table_indicator = \"-- Table structure for", "= [ (\"ID_SAMPLE\", (\"int\",)), (\"ID_TREAT\", (\"int\",)), (\"SAMPLE_DESC_NAME\", (\"char\", 200)), ]", "(\"char\", 2)), (\"HAS_PLOT\", (\"char\", 10)), (\"DATA_POINTS\", (\"char\",)), (\"START_YEAR\", (\"char\",)), (\"END_YEAR\",", "= False csv_writer = None csv_file = None table_name =", "table = Table(\"datasets\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_DATASETS\",", "150)), (\"LATITUDE\", (\"double\",)), (\"LONGITUDE\", (\"double\",)), (\"DEPTH\", (\"double\",)), (\"DAY\", (\"int\",)), (\"MONTH\",", "table table = Table(\"species\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "table table = Table(\"curation\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "(\"NUMBER_OF_SPECIES\", (\"char\",)), (\"NUMBER_OF_SAMPLES\", (\"char\",)), (\"NUMBER_LAT_LONG\", (\"char\",)), (\"TOTAL\", (\"char\",)), (\"GRAIN_SIZE_TEXT\", (\"char\",)),", "\"{name}.csv\".format(name=table_name) csv_file = open_fw(engine.format_filename(out_file)) csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL) 
set_open =", "Country/Association\" self.name = \"biotimesql\" self.retriever_minimum_version = \"2.2.0\" self.urls = {", "(\"ID_DATASETS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"TAXA\", (\"char\", 50)), (\"ORGANISMS\", (\"char\", 200)),", "sql_data = open_fr(self.engine.format_filename(original_sql_file)) set_open = False csv_writer = None csv_file", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) # Create biomass table table = Table(\"biomass\",", "(\"DAY\", (\"int\",)), (\"MONTH\", (\"int\",)), (\"YEAR\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), ] engine.table", "except ImportError: from retriever import open_fr, open_fw, VERSION class main(Script):", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) # Create contacts table table = Table(\"contacts\",", "(\"double\",)), (\"BIOME_MAP\", (\"char\", 500)) ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"site.csv\"))", "200)), (\"TREATMENT\", (\"char\", 200)), (\"TREAT_COMMENTS\", (\"char\", 250)), (\"TREAT_DATE\", (\"char\", 100)),", "(\"BIOME_MAP\", (\"char\", 500)) ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"site.csv\")) #", "open_fw, VERSION class main(Script): def __init__(self, **kwargs): Script.__init__(self, **kwargs) self.title", "table = Table(\"curation\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CURATION\",", "= [\"Time series\", \"Anthropocene\", \"Global\"] self.licenses = [{\"name\": \"CC BY", "(\"CENT_LAT\", (\"double\",)), (\"CENT_LONG\", (\"double\",)), (\"NUMBER_OF_SPECIES\", (\"char\",)), (\"NUMBER_OF_SAMPLES\", (\"char\",)), (\"NUMBER_LAT_LONG\", (\"char\",)),", "table table = Table(\"sample\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "100)), (\"GENUS_SPECIES\", (\"char\", 100)) ] engine.table = table 
engine.create_table() engine.insert_data_from_file(engine.format_filename(\"species.csv\"))", "original_sql_file = \"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"], original_sql_file) sql_data = open_fr(self.engine.format_filename(original_sql_file)) set_open =", "<NAME>, <NAME>, et al. BioTIME: A database of biodiversity time", "(\"char\", 200)), (\"TREATMENT\", (\"char\", 200)), (\"TREAT_COMMENTS\", (\"char\", 250)), (\"TREAT_DATE\", (\"char\",", "import open_fr, open_fw except ImportError: from retriever import open_fr, open_fw,", "import csv from pkg_resources import parse_version from retriever.lib.models import Table", "Create countries table table = Table(\"countries\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "(\"GENUS_SPECIES\", (\"char\", 100)) ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"species.csv\")) SCRIPT", "self.title = \"Commercial Fisheries Monthly Trade Data by Product, Country/Association\"", "\"The BioTIME database has species identities and abundances in ecological", "= None NULL = None for line in sql_data: table_indicator", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) # Create methods table table", "(\"ABUNDANCE_TYPE\", (\"char\", \"100\")), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"abundance.csv\")) #", "= self.title self.tags = self.keywords def download(self, engine=None, debug=False): Script.download(self,", "(\"DEPTH\", (\"double\",)), (\"DAY\", (\"int\",)), (\"MONTH\", (\"int\",)), (\"YEAR\", (\"int\",)), (\"STUDY_ID\", (\"int\",)),", "from retriever.lib.scripts import open_fr, open_fw except ImportError: from retriever import", "A database of biodiversity time series for the Anthropocene. 
Global", "self.name self.name = self.title self.tags = self.keywords def download(self, engine=None,", "= Table(\"datasets\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_DATASETS\", (\"int\",)),", "(\"char\", \"100\"))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"biomass.csv\")) # Create citation1", "= Table(\"site\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_SITE\", (\"int\",)),", "table = Table(\"biomass\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"ID_BIOMASS\", (\"int\",)),", "26. https://doi.org/10.1111/geb.12729.\" self.description = \"The BioTIME database has species identities", "#retriever import csv from pkg_resources import parse_version from retriever.lib.models import", "import Script try: from retriever.lib.defaults import VERSION try: from retriever.lib.tools", "i_row in table_rows: v = eval('[' + str(i_row) + ']')", "(\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"METHODS\", (\"char\",)), (\"SUMMARY_METHODS\", (\"char\", 500)), ] engine.table", "__init__(self, **kwargs): Script.__init__(self, **kwargs) self.title = \"Commercial Fisheries Monthly Trade", "header_rows=0, contains_pk=False) table.columns = [ (\"D_ID\", (\"int\",)), (\"STUDY\", (\"char\", 25)),", "= open_fw(engine.format_filename(out_file)) csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL) set_open = True if", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) # Create countries table table = Table(\"countries\",", "None table_name = None NULL = None for line in", "\"latin1\" if parse_version(VERSION) <= parse_version(\"2.0.0\"): self.shortname = self.name self.name =", "set_open = True if line.startswith(\"INSERT INTO `{table_name}`\".format(table_name=table_name)): row_val = line[line.index(\"VALUES", "(\"char\", 500)), (\"CONTACT_2\", (\"char\", 500)), (\"CONT_1_MAIL\", (\"char\", 60)), 
(\"CONT_2_MAIL\", (\"char\",", "downloads table table = Table(\"downloads\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "# Create curation table table = Table(\"curation\", delimiter=\",\", header_rows=0, contains_pk=False)", "= False else: out_file = \"{name}.csv\".format(name=table_name) csv_file = open_fw(engine.format_filename(out_file)) csv_writer", "= \"2.2.0\" self.urls = { \"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", } self.version =", "= [(\"ID_BIOMASS\", (\"int\",)), (\"BIOMASS_TYPE\", (\"char\", \"100\"))] engine.table = table engine.create_table()", "(\"char\",)), (\"TOTAL\", (\"char\",)), (\"GRAIN_SIZE_TEXT\", (\"char\",)), (\"GRAIN_SQ_KM\", (\"double\",)), (\"AREA_SQ_KM\", (\"double\",)), (\"AB_TYPE\",", "150)), (\"COUNTRY\", (\"char\", 200)), (\"ROLE\", (\"char\", 150)), (\"PURPOSE\", (\"char\", 500)),", "through time.\" self.keywords = [\"Time series\", \"Anthropocene\", \"Global\"] self.licenses =", "= \"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"], original_sql_file) sql_data = open_fr(self.engine.format_filename(original_sql_file)) set_open = False", "(\"STUDY_ID\", (\"int\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) # Create", "NULL = None for line in sql_data: table_indicator = \"--", "Create abundance table table = Table(\"ID_ABUNDANCE\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns", "import VERSION try: from retriever.lib.tools import open_fr, open_fw, open_csvw except", "table table = Table(\"contacts\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "from retriever.lib.tools import open_fr, open_fw, open_csvw except ImportError: from retriever.lib.scripts", "import open_fr, open_fw, VERSION class main(Script): def __init__(self, **kwargs): Script.__init__(self,", "ecological assemblages through time.\" self.keywords = 
[\"Time series\", \"Anthropocene\", \"Global\"]", "`{table_name}`\".format(table_name=table_name)): row_val = line[line.index(\"VALUES (\") + 8:-3] table_rows = row_val.replace(\"\\r\\n\",\"\").split(\"),(\")", "retriever.lib.models import Table from retriever.lib.templates import Script try: from retriever.lib.defaults", "(\"GENUS\", (\"char\", 100)), (\"SPECIES\", (\"char\", 100)), (\"GENUS_SPECIES\", (\"char\", 100)) ]", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_CONTACTS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CONTACT_1\",", "csv_file = None table_name = None NULL = None for", "engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) # Create sample table table", "= \"The BioTIME database has species identities and abundances in", "(\"char\", 200)), (\"WEB_LINK\", (\"char\", 200)), (\"DATA_SOURCE\", (\"char\", 250)), ] engine.table", "(\"GENERAL_TREAT\", (\"char\", 200)), (\"TREATMENT\", (\"char\", 200)), (\"TREAT_COMMENTS\", (\"char\", 250)), (\"TREAT_DATE\",", "(\"AREA_SQ_KM\", (\"double\",)), (\"AB_TYPE\", (\"char\", )), (\"BIO_TYPE\", (\"char\",)), (\"SAMPLE_TYPE\", (\"char\",)), ]", "engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) # Create contacts table table = Table(\"contacts\", delimiter=\",\", header_rows=0,", "(\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"TAXA\", (\"char\", 50)), (\"ORGANISMS\", (\"char\", 200)), (\"TITLE\",", "(\"CONTACT_2\", (\"char\", 500)), (\"CONT_1_MAIL\", (\"char\", 60)), (\"CONT_2_MAIL\", (\"char\", 60)), (\"LICENSE\",", "100)), (\"PROTECTED_AREA\", (\"char\", 50)), (\"AREA\", (\"double\",)), (\"BIOME_MAP\", (\"char\", 500)) ]", "https://doi.org/10.1111/geb.12729.\" self.description = \"The BioTIME database has species identities and", "(\"CONTACT_1\", (\"char\", 500)), (\"CONTACT_2\", (\"char\", 500)), (\"CONT_1_MAIL\", (\"char\", 60)), (\"CONT_2_MAIL\",", "# Create species table table = Table(\"species\", delimiter=\",\", 
header_rows=0, contains_pk=False)", "# Create biomass table table = Table(\"biomass\", delimiter=\",\", header_rows=0, contains_pk=False)", "(\"STUDY_ID\", (\"int\",)), (\"CITATION_LINE\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\"))", "header_rows=0, contains_pk=False) table.columns = [(\"COUNT_ID\", (\"int\",)), (\"COUNTRY_NAME\", (\"char\", 200))] engine.table", "table \" if line.startswith(table_indicator): st = line[len(table_indicator):].replace(\"`\", \"\") table_name =", "self.urls = { \"sql_file\": \"https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1\", } self.version = \"1.0.1\" self.ref", "Global Ecology & Biogeography. 2018; 00:1 - 26. https://doi.org/10.1111/geb.12729.\" self.description", "contains_pk=False) table.columns = [ (\"ID_SITE\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"REALM\", (\"char\",", "sample table table = Table(\"sample\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "= row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for i_row in table_rows: v = eval('[' +", "st.strip() current_file_process = table_name current_file_open = current_file_process if set_open and", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) # Create contacts table table = Table(\"contacts\", delimiter=\",\",", "= True if line.startswith(\"INSERT INTO `{table_name}`\".format(table_name=table_name)): row_val = line[line.index(\"VALUES (\")", "self.title self.tags = self.keywords def download(self, engine=None, debug=False): Script.download(self, engine,", "abundances in ecological assemblages through time.\" self.keywords = [\"Time series\",", "= Table(\"countries\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"COUNT_ID\", (\"int\",)), (\"COUNTRY_NAME\",", "csv_file = open_fw(engine.format_filename(out_file)) csv_writer = csv.writer(csv_file, 
quoting=csv.QUOTE_ALL) set_open = True", "table.columns = [ (\"ID_SITE\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"REALM\", (\"char\", 11)),", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) # Create curation table table = Table(\"curation\",", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) # Create methods table table =", "countries table table = Table(\"countries\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns =", "retriever import open_fr, open_fw, VERSION class main(Script): def __init__(self, **kwargs):", "structure for table \" if line.startswith(table_indicator): st = line[len(table_indicator):].replace(\"`\", \"\")", "of biodiversity time series for the Anthropocene. Global Ecology &", "parse_version(\"2.0.0\"): self.shortname = self.name self.name = self.title self.tags = self.keywords", "st = line[len(table_indicator):].replace(\"`\", \"\") table_name = st.strip() current_file_process = table_name", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) # Create downloads table table = Table(\"downloads\",", "(\"char\", 20)), (\"GENERAL_TREAT\", (\"char\", 200)), (\"TREATMENT\", (\"char\", 200)), (\"TREAT_COMMENTS\", (\"char\",", "[ (\"ID_SITE\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"REALM\", (\"char\", 11)), (\"CLIMATE\", (\"char\",", "contains_pk=False) table.columns = [ (\"D_ID\", (\"int\",)), (\"STUDY\", (\"char\", 25)), (\"NAME\",", "\"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"], original_sql_file) sql_data = open_fr(self.engine.format_filename(original_sql_file)) set_open = False csv_writer", "table = Table(\"countries\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"COUNT_ID\", (\"int\",)),", "v = eval('[' + str(i_row) + ']') csv_writer.writerows([v]) if csv_file:", "(\"int\",)), (\"STUDY_ID\", (\"int\",)), ] 
engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) #", "(\"D_ID\", (\"int\",)), (\"STUDY\", (\"char\", 25)), (\"NAME\", (\"char\", 150)), (\"EMAIL\", (\"char\",", "engine.insert_data_from_file(engine.format_filename(\"site.csv\")) # Create species table table = Table(\"species\", delimiter=\",\", header_rows=0,", "(\"WEB_LINK\", (\"char\", 200)), (\"DATA_SOURCE\", (\"char\", 250)), ] engine.table = table", "(\"int\",)), (\"COUNTRY_NAME\", (\"char\", 200))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) #", "[ (\"ID_CITATION1\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CITATION_LINE\", (\"char\",)), ] engine.table =", "set_open and not current_file_process == current_file_open: csv_file.close() set_open = False", "parse_version from retriever.lib.models import Table from retriever.lib.templates import Script try:", "contains_pk=False) table.columns = [ (\"ID_METHODS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"METHODS\", (\"char\",)),", "contains_pk=False) table.columns = [ (\"ID_SAMPLE\", (\"int\",)), (\"ID_TREAT\", (\"int\",)), (\"SAMPLE_DESC_NAME\", (\"char\",", "(\"char\", 100)), (\"SPECIES\", (\"char\", 100)), (\"GENUS_SPECIES\", (\"char\", 100)) ] engine.table", "from pkg_resources import parse_version from retriever.lib.models import Table from retriever.lib.templates", "Table(\"biomass\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"ID_BIOMASS\", (\"int\",)), (\"BIOMASS_TYPE\", (\"char\",", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) # Create methods table", "250)), (\"TREAT_DATE\", (\"char\", 100)), (\"CEN_LATITUDE\", (\"double\",)), (\"CEN_LONGITUDE\", (\"double\",)), (\"HABITAT\", (\"char\",", "engine.insert_data_from_file(engine.format_filename(\"datasets.csv\")) # Create downloads table table = Table(\"downloads\", 
delimiter=\",\", header_rows=0,", "(\"LONGITUDE\", (\"double\",)), (\"DEPTH\", (\"double\",)), (\"DAY\", (\"int\",)), (\"MONTH\", (\"int\",)), (\"YEAR\", (\"int\",)),", "series for the Anthropocene. Global Ecology & Biogeography. 2018; 00:1", "table.columns = [ (\"ID_CONTACTS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"CONTACT_1\", (\"char\", 500)),", "= eval('[' + str(i_row) + ']') csv_writer.writerows([v]) if csv_file: csv_file.close()", "not current_file_process == current_file_open: csv_file.close() set_open = False else: out_file", "] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"citation1.csv\")) # Create contacts table", "(\"ID_METHODS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"METHODS\", (\"char\",)), (\"SUMMARY_METHODS\", (\"char\", 500)), ]", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_SAMPLE\", (\"int\",)), (\"ID_TREAT\", (\"int\",)), (\"SAMPLE_DESC_NAME\",", "True if line.startswith(\"INSERT INTO `{table_name}`\".format(table_name=table_name)): row_val = line[line.index(\"VALUES (\") +", "200)), (\"TITLE\", (\"char\",800)), (\"AB_BIO\", (\"char\", 2)), (\"HAS_PLOT\", (\"char\", 10)), (\"DATA_POINTS\",", "Data by Product, Country/Association\" self.name = \"biotimesql\" self.retriever_minimum_version = \"2.2.0\"", "line.startswith(\"INSERT INTO `{table_name}`\".format(table_name=table_name)): row_val = line[line.index(\"VALUES (\") + 8:-3] table_rows", "(\"SAMPLE_DESC\", (\"char\", 200)), (\"PLOT\", (\"char\", 150)), (\"LATITUDE\", (\"double\",)), (\"LONGITUDE\", (\"double\",)),", "= [ (\"D_ID\", (\"int\",)), (\"STUDY\", (\"char\", 25)), (\"NAME\", (\"char\", 150)),", "= [ (\"ID_SITE\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"REALM\", (\"char\", 11)), (\"CLIMATE\",", "= Table(\"ID_ABUNDANCE\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_ABUNDANCE\", (\"int\",)),", "identities and abundances in ecological assemblages through time.\" self.keywords 
=", "(\"char\", 250)), (\"TREAT_DATE\", (\"char\", 100)), (\"CEN_LATITUDE\", (\"double\",)), (\"CEN_LONGITUDE\", (\"double\",)), (\"HABITAT\",", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"contacts.csv\")) # Create countries table table = Table(\"countries\", delimiter=\",\",", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"ID_BIOMASS\", (\"int\",)), (\"BIOMASS_TYPE\", (\"char\", \"100\"))]", "(\"STUDY_ID\", (\"int\",)), (\"METHODS\", (\"char\",)), (\"SUMMARY_METHODS\", (\"char\", 500)), ] engine.table =", "current_file_process if set_open and not current_file_process == current_file_open: csv_file.close() set_open", "self.ref = \"https://zenodo.org/record/1095628#.WskN7dPwYyn\" self.citation = \"<NAME>, <NAME>, <NAME>, et al.", "if csv_file: csv_file.close() # Create abundance table table = Table(\"ID_ABUNDANCE\",", "(\"DATE_STAMP\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\")) # Create", "engine, debug) engine = self.engine original_sql_file = \"BioTIMESQL02_04_2018.sql\" engine.download_file(self.urls[\"sql_file\"], original_sql_file)", "(\"double\",)), (\"LONGITUDE\", (\"double\",)), (\"DEPTH\", (\"double\",)), (\"DAY\", (\"int\",)), (\"MONTH\", (\"int\",)), (\"YEAR\",", "open_csvw except ImportError: from retriever.lib.scripts import open_fr, open_fw except ImportError:", "(\"AREA\", (\"double\",)), (\"BIOME_MAP\", (\"char\", 500)) ] engine.table = table engine.create_table()", "self.tags = self.keywords def download(self, engine=None, debug=False): Script.download(self, engine, debug)", "open_fr, open_fw, open_csvw except ImportError: from retriever.lib.scripts import open_fr, open_fw", "table = Table(\"allrawdata\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_ALL_RAW_DATA\",", "(\"int\",)), ] engine.table = table engine.create_table() 
engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) # Create biomass", "[ (\"ID_SPECIES\", (\"int\",)), (\"GENUS\", (\"char\", 100)), (\"SPECIES\", (\"char\", 100)), (\"GENUS_SPECIES\",", "table table = Table(\"countries\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [(\"COUNT_ID\",", "= open_fr(self.engine.format_filename(original_sql_file)) set_open = False csv_writer = None csv_file =", "csv_file.close() set_open = False else: out_file = \"{name}.csv\".format(name=table_name) csv_file =", "engine.insert_data_from_file(engine.format_filename(\"allrawdata.csv\")) # Create biomass table table = Table(\"biomass\", delimiter=\",\", header_rows=0,", "200))] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"countries.csv\")) # Create curation table", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) # Create sample table table = Table(\"sample\", delimiter=\",\",", "engine.create_table() engine.insert_data_from_file(engine.format_filename(\"site.csv\")) # Create species table table = Table(\"species\", delimiter=\",\",", "= line[line.index(\"VALUES (\") + 8:-3] table_rows = row_val.replace(\"\\r\\n\",\"\").split(\"),(\") for i_row", "10)), (\"DATA_POINTS\", (\"char\",)), (\"START_YEAR\", (\"char\",)), (\"END_YEAR\", (\"char\",)), (\"CENT_LAT\", (\"double\",)), (\"CENT_LONG\",", "debug=False): Script.download(self, engine, debug) engine = self.engine original_sql_file = \"BioTIMESQL02_04_2018.sql\"", "if set_open and not current_file_process == current_file_open: csv_file.close() set_open =", "csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL) set_open = True if line.startswith(\"INSERT INTO", "= table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"sample.csv\")) # Create site table table =", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_DATASETS\", (\"int\",)), 
(\"STUDY_ID\", (\"int\",)),", "open_fw(engine.format_filename(out_file)) csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL) set_open = True if line.startswith(\"INSERT", "BY 4.0\"}] self.encoding = \"latin1\" if parse_version(VERSION) <= parse_version(\"2.0.0\"): self.shortname", "table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"methods.csv\")) # Create sample table table = Table(\"sample\",", "Table(\"datasets\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_DATASETS\", (\"int\",)), (\"STUDY_ID\",", "(\"char\", 500)) ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"site.csv\")) # Create", "table_rows: v = eval('[' + str(i_row) + ']') csv_writer.writerows([v]) if", "(\"char\", 250)), (\"DATE_STAMP\", (\"char\",)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"downloads.csv\"))", "if parse_version(VERSION) <= parse_version(\"2.0.0\"): self.shortname = self.name self.name = self.title", "(\"char\",)), (\"DATE_STUDY_ADDED\", (\"char\", 50)), ] engine.table = table engine.create_table() engine.insert_data_from_file(engine.format_filename(\"curation.csv\"))", "(\"NUMBER_LAT_LONG\", (\"char\",)), (\"TOTAL\", (\"char\",)), (\"GRAIN_SIZE_TEXT\", (\"char\",)), (\"GRAIN_SQ_KM\", (\"double\",)), (\"AREA_SQ_KM\", (\"double\",)),", "self.keywords = [\"Time series\", \"Anthropocene\", \"Global\"] self.licenses = [{\"name\": \"CC", "= Table(\"citation1\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CITATION1\", (\"int\",)),", "(\"char\", 50)), (\"ORGANISMS\", (\"char\", 200)), (\"TITLE\", (\"char\",800)), (\"AB_BIO\", (\"char\", 2)),", "for i_row in table_rows: v = eval('[' + str(i_row) +", "table.columns = [ (\"ID_CURATION\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"LINK_ID\", (\"int\",)), (\"COMMENTS\",", "(\"CLIMATE\", (\"char\", 20)), (\"GENERAL_TREAT\", (\"char\", 200)), 
(\"TREATMENT\", (\"char\", 200)), (\"TREAT_COMMENTS\",", "set_open = False csv_writer = None csv_file = None table_name", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_CURATION\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"LINK_ID\",", ")), (\"BIO_TYPE\", (\"char\",)), (\"SAMPLE_TYPE\", (\"char\",)), ] engine.table = table engine.create_table()", "& Biogeography. 2018; 00:1 - 26. https://doi.org/10.1111/geb.12729.\" self.description = \"The", "header_rows=0, contains_pk=False) table.columns = [ (\"ID_DATASETS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)), (\"TAXA\",", "BioTIME: A database of biodiversity time series for the Anthropocene.", "(\"LINK_ID\", (\"int\",)), (\"COMMENTS\", (\"char\",)), (\"DATE_STUDY_ADDED\", (\"char\", 50)), ] engine.table =", "and abundances in ecological assemblages through time.\" self.keywords = [\"Time", "table table = Table(\"ID_ABUNDANCE\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "table table = Table(\"citation1\", delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [", "2018; 00:1 - 26. https://doi.org/10.1111/geb.12729.\" self.description = \"The BioTIME database", "delimiter=\",\", header_rows=0, contains_pk=False) table.columns = [ (\"ID_CONTACTS\", (\"int\",)), (\"STUDY_ID\", (\"int\",)),", "500)), (\"LOCATION\", (\"char\", 250)), (\"DATE_STAMP\", (\"char\",)), ] engine.table = table", "(\"STUDY_ID\", (\"int\",)), (\"REALM\", (\"char\", 11)), (\"CLIMATE\", (\"char\", 20)), (\"GENERAL_TREAT\", (\"char\"," ]
[ "json from alipay.aop.api.constant.ParamConstants import * class KbAdvertSettleBillResponse(object): def __init__(self): self._download_url", "= d['download_url'] if 'paid_date' in d: o.paid_date = d['paid_date'] return", "from alipay.aop.api.constant.ParamConstants import * class KbAdvertSettleBillResponse(object): def __init__(self): self._download_url =", "self.paid_date return params @staticmethod def from_alipay_dict(d): if not d: return", "KbAdvertSettleBillResponse(object): def __init__(self): self._download_url = None self._paid_date = None @property", "from_alipay_dict(d): if not d: return None o = KbAdvertSettleBillResponse() if", "self._download_url = None self._paid_date = None @property def download_url(self): return", "d: o.download_url = d['download_url'] if 'paid_date' in d: o.paid_date =", "def to_alipay_dict(self): params = dict() if self.download_url: if hasattr(self.download_url, 'to_alipay_dict'):", "-*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import *", "= self.paid_date return params @staticmethod def from_alipay_dict(d): if not d:", "@paid_date.setter def paid_date(self, value): self._paid_date = value def to_alipay_dict(self): params", "to_alipay_dict(self): params = dict() if self.download_url: if hasattr(self.download_url, 'to_alipay_dict'): params['download_url']", "self.paid_date: if hasattr(self.paid_date, 'to_alipay_dict'): params['paid_date'] = self.paid_date.to_alipay_dict() else: params['paid_date'] =", "return params @staticmethod def from_alipay_dict(d): if not d: return None", "params @staticmethod def from_alipay_dict(d): if not d: return None o", "= self.paid_date.to_alipay_dict() else: params['paid_date'] = self.paid_date return params @staticmethod def", "@property def paid_date(self): return self._paid_date @paid_date.setter def paid_date(self, value): self._paid_date", "o = KbAdvertSettleBillResponse() if 'download_url' in d: o.download_url = d['download_url']", "= value @property def 
paid_date(self): return self._paid_date @paid_date.setter def paid_date(self,", "__init__(self): self._download_url = None self._paid_date = None @property def download_url(self):", "download_url(self): return self._download_url @download_url.setter def download_url(self, value): self._download_url = value", "def from_alipay_dict(d): if not d: return None o = KbAdvertSettleBillResponse()", "alipay.aop.api.constant.ParamConstants import * class KbAdvertSettleBillResponse(object): def __init__(self): self._download_url = None", "hasattr(self.paid_date, 'to_alipay_dict'): params['paid_date'] = self.paid_date.to_alipay_dict() else: params['paid_date'] = self.paid_date return", "params['download_url'] = self.download_url.to_alipay_dict() else: params['download_url'] = self.download_url if self.paid_date: if", "'download_url' in d: o.download_url = d['download_url'] if 'paid_date' in d:", "value def to_alipay_dict(self): params = dict() if self.download_url: if hasattr(self.download_url,", "value): self._download_url = value @property def paid_date(self): return self._paid_date @paid_date.setter", "None o = KbAdvertSettleBillResponse() if 'download_url' in d: o.download_url =", "params['paid_date'] = self.paid_date.to_alipay_dict() else: params['paid_date'] = self.paid_date return params @staticmethod", "@property def download_url(self): return self._download_url @download_url.setter def download_url(self, value): self._download_url", "params = dict() if self.download_url: if hasattr(self.download_url, 'to_alipay_dict'): params['download_url'] =", "= dict() if self.download_url: if hasattr(self.download_url, 'to_alipay_dict'): params['download_url'] = self.download_url.to_alipay_dict()", "return None o = KbAdvertSettleBillResponse() if 'download_url' in d: o.download_url", "d['download_url'] if 'paid_date' in d: o.paid_date = d['paid_date'] return o", "not d: return None o = KbAdvertSettleBillResponse() if 'download_url' in", "python # -*- coding: utf-8 -*- import json 
from alipay.aop.api.constant.ParamConstants", "@download_url.setter def download_url(self, value): self._download_url = value @property def paid_date(self):", "#!/usr/bin/env python # -*- coding: utf-8 -*- import json from", "def download_url(self, value): self._download_url = value @property def paid_date(self): return", "def download_url(self): return self._download_url @download_url.setter def download_url(self, value): self._download_url =", "self.download_url: if hasattr(self.download_url, 'to_alipay_dict'): params['download_url'] = self.download_url.to_alipay_dict() else: params['download_url'] =", "= KbAdvertSettleBillResponse() if 'download_url' in d: o.download_url = d['download_url'] if", "= None self._paid_date = None @property def download_url(self): return self._download_url", "None @property def download_url(self): return self._download_url @download_url.setter def download_url(self, value):", "if not d: return None o = KbAdvertSettleBillResponse() if 'download_url'", "o.download_url = d['download_url'] if 'paid_date' in d: o.paid_date = d['paid_date']", "coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * class", "def __init__(self): self._download_url = None self._paid_date = None @property def", "download_url(self, value): self._download_url = value @property def paid_date(self): return self._paid_date", "import json from alipay.aop.api.constant.ParamConstants import * class KbAdvertSettleBillResponse(object): def __init__(self):", "self._download_url @download_url.setter def download_url(self, value): self._download_url = value @property def", "value @property def paid_date(self): return self._paid_date @paid_date.setter def paid_date(self, value):", "= self.download_url if self.paid_date: if hasattr(self.paid_date, 'to_alipay_dict'): params['paid_date'] = self.paid_date.to_alipay_dict()", "'to_alipay_dict'): params['paid_date'] = self.paid_date.to_alipay_dict() else: params['paid_date'] = self.paid_date return 
params", "-*- import json from alipay.aop.api.constant.ParamConstants import * class KbAdvertSettleBillResponse(object): def", "if 'download_url' in d: o.download_url = d['download_url'] if 'paid_date' in", "'to_alipay_dict'): params['download_url'] = self.download_url.to_alipay_dict() else: params['download_url'] = self.download_url if self.paid_date:", "else: params['paid_date'] = self.paid_date return params @staticmethod def from_alipay_dict(d): if", "self._paid_date = None @property def download_url(self): return self._download_url @download_url.setter def", "return self._download_url @download_url.setter def download_url(self, value): self._download_url = value @property", "<filename>alipay/aop/api/domain/KbAdvertSettleBillResponse.py #!/usr/bin/env python # -*- coding: utf-8 -*- import json", "utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * class KbAdvertSettleBillResponse(object):", "class KbAdvertSettleBillResponse(object): def __init__(self): self._download_url = None self._paid_date = None", "KbAdvertSettleBillResponse() if 'download_url' in d: o.download_url = d['download_url'] if 'paid_date'", "in d: o.download_url = d['download_url'] if 'paid_date' in d: o.paid_date", "= None @property def download_url(self): return self._download_url @download_url.setter def download_url(self,", "paid_date(self, value): self._paid_date = value def to_alipay_dict(self): params = dict()", "hasattr(self.download_url, 'to_alipay_dict'): params['download_url'] = self.download_url.to_alipay_dict() else: params['download_url'] = self.download_url if", "@staticmethod def from_alipay_dict(d): if not d: return None o =", "if hasattr(self.paid_date, 'to_alipay_dict'): params['paid_date'] = self.paid_date.to_alipay_dict() else: params['paid_date'] = self.paid_date", "dict() if self.download_url: if hasattr(self.download_url, 'to_alipay_dict'): params['download_url'] = self.download_url.to_alipay_dict() else:", "import * class 
KbAdvertSettleBillResponse(object): def __init__(self): self._download_url = None self._paid_date", "def paid_date(self, value): self._paid_date = value def to_alipay_dict(self): params =", "* class KbAdvertSettleBillResponse(object): def __init__(self): self._download_url = None self._paid_date =", "self._paid_date @paid_date.setter def paid_date(self, value): self._paid_date = value def to_alipay_dict(self):", "if self.download_url: if hasattr(self.download_url, 'to_alipay_dict'): params['download_url'] = self.download_url.to_alipay_dict() else: params['download_url']", "# -*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import", "= value def to_alipay_dict(self): params = dict() if self.download_url: if", "return self._paid_date @paid_date.setter def paid_date(self, value): self._paid_date = value def", "else: params['download_url'] = self.download_url if self.paid_date: if hasattr(self.paid_date, 'to_alipay_dict'): params['paid_date']", "if hasattr(self.download_url, 'to_alipay_dict'): params['download_url'] = self.download_url.to_alipay_dict() else: params['download_url'] = self.download_url", "None self._paid_date = None @property def download_url(self): return self._download_url @download_url.setter", "= self.download_url.to_alipay_dict() else: params['download_url'] = self.download_url if self.paid_date: if hasattr(self.paid_date,", "def paid_date(self): return self._paid_date @paid_date.setter def paid_date(self, value): self._paid_date =", "self.paid_date.to_alipay_dict() else: params['paid_date'] = self.paid_date return params @staticmethod def from_alipay_dict(d):", "self._download_url = value @property def paid_date(self): return self._paid_date @paid_date.setter def", "value): self._paid_date = value def to_alipay_dict(self): params = dict() if", "self._paid_date = value def to_alipay_dict(self): params = dict() if self.download_url:", "paid_date(self): return self._paid_date @paid_date.setter def paid_date(self, value): 
self._paid_date = value", "params['paid_date'] = self.paid_date return params @staticmethod def from_alipay_dict(d): if not", "self.download_url if self.paid_date: if hasattr(self.paid_date, 'to_alipay_dict'): params['paid_date'] = self.paid_date.to_alipay_dict() else:", "if self.paid_date: if hasattr(self.paid_date, 'to_alipay_dict'): params['paid_date'] = self.paid_date.to_alipay_dict() else: params['paid_date']", "d: return None o = KbAdvertSettleBillResponse() if 'download_url' in d:", "params['download_url'] = self.download_url if self.paid_date: if hasattr(self.paid_date, 'to_alipay_dict'): params['paid_date'] =", "self.download_url.to_alipay_dict() else: params['download_url'] = self.download_url if self.paid_date: if hasattr(self.paid_date, 'to_alipay_dict'):" ]
[ "opcode is 8, carry out equality comparison and store 1/0", "Running Intcode program looks reads in the integers sequentially in", "given by third param, otherwise store 0 at position given", "4: data[i] == Parameter Mode + Opcode (last two digits)", "#print(program) result1, answer1 = run_intcode(program, 1) #print(result1) print(\"Part 1: Answer", "Opcode == 7 and entry 1> entry 2, store 1", "Opcode == 5 and entry 1 is != 0, the", "0, jump forward elif opcode == 6: if params[0] ==", "opcode == 6: if params[0] == 0: i = params[1]", "if input = 0, 1 otherwise #test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20,", "== 6 and entry 1 is 0, the intcode postion", "input value at required location. elif opcode == 3: data[data[i+1]]", "\"\"\" data = program[:] answer = -1 params = [0,", "the single integer (input) is saved to the position given", "is completed and will stop running. Parameters are digits to", "ended by halt code\") break # If opcode is anything", "jump forward elif opcode == 5: if params[0] != 0:", "1/0 at loc 3 elif opcode == 7: if params[0]", "8, 1000 if input = 8, 1001 if input >", "is 8, carry out equality comparison and store 1/0 at", "at index location = entry 1 and 2 in the", "\"{:0>5d}\".format(data[i]) opcode = int(opcode_str[3:]) param_modes[0] = opcode_str[2] param_modes[1] = opcode_str[1]", "#test_program3 = [1101,100,-1,4,0] #test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] # 1 if input", "and converts to ints \"\"\" data = [] f =", "the index location of entry 3. If Opcode == 2,", "program outputs the value of its only parameter. E.g. 
4,50", "third param, otherwise store 0 at position given by third", "the integers sequentially in sets of 4: data[i] == Parameter", "treated as an index location Parameter 1 -> Immediate mode", "except IndexError: continue else: try: params[j] = data[i+j+1] except IndexError:", "is 5 and the next parameter !=0, jump forward elif", "params[1] else: i += 3 # If the opcode is", "# 1 if input < 8, 0 otherwise #test_program6 =", "3: data[data[i+1]] = input_int i += 2; # If opcode", "parameter is 0, jump forward elif opcode == 6: if", "data[data[i+j+1]] except IndexError: continue else: try: params[j] = data[i+j+1] except", "Entry 3 If Opcode == 1, the value of the", "value at address 50. If Opcode == 5 and entry", "converts to ints \"\"\" data = [] f = open(filename,", "If Opcode == 5 and entry 1 is != 0,", "-> Immediate mode - the entry is treated as a", "1 if input = 8, 0 otherwise #test_program7 = [3,3,1107,-1,8,3,4,3,99]", "less than comparison and store 1/0 at loc 3 elif", "== Entry 3 If Opcode == 1, the value of", "99, the program is completed and will stop running. Parameters", "entry 1 and 2 in the program are multiplied and", "opcode_str = \"{:0>5d}\".format(data[i]) opcode = int(opcode_str[3:]) param_modes[0] = opcode_str[2] param_modes[1]", "address 50. If Opcode == 5 and entry 1 is", "3 elif opcode == 7: if params[0] < params[1]: data[data[i+3]]", "at entry 2. Otherwise it does nothing. If Opcode ==", "1. If Opcode == 4, the program outputs the value", "is anything else something has gone wrong! else: print(\"Problem with", "1 is 0, the intcode postion moves to the index", "at address 50. If Opcode == 5 and entry 1", "Dec 2 11:06:59 2019 @author: Paul \"\"\" def read_data(filename): \"\"\"", "f.close() return int_data def run_intcode(program, input_int): \"\"\" Takes data, list", "\"\"\" Created on Mon Dec 2 11:06:59 2019 @author: Paul", "position moves to the index stored at entry 2. 
Otherwise", "param, otherwise store 0 at position given by third param.", "and 2 in the program are summed and stored at", "is 4, print out the input stored at specified location.", "= entry 1 and 2 in the program are multiplied", "If Opcode == 2, the value of the opcode at", "has been run. Running Intcode program looks reads in the", "Opcode == 3, the the single integer (input) is saved", "= run_intcode(program, 5) #print(result2) print(\"Part 2: Answer is: \", answer2)", "[3,3,1108,-1,8,3,4,3,99] # 1 if input = 8, 0 otherwise #test_program7", "the opcode is 6 and next parameter is 0, jump", "codes: opcode_str = \"{:0>5d}\".format(data[i]) opcode = int(opcode_str[3:]) param_modes[0] = opcode_str[2]", "print(\"Program ended by halt code\") break # If opcode is", "integers sequentially in sets of 4: data[i] == Parameter Mode", "< params[1]: data[data[i+3]] = 1 else: data[data[i+3]] = 0 i", "Immediate mode - the entry is treated as a value", "opcode == 2: data[data[i+3]] = params[0] * params[1] i +=", "If opcode is anything else something has gone wrong! else:", "-*- coding: utf-8 -*- \"\"\" Created on Mon Dec 2", "the opcode at index location = entry 1 and 2", "at loc 3 elif opcode == 7: if params[0] <", "position given by third param. If Opcode == 99, the", "= [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if input = 0, 1 otherwise", "after intcode program has been run. Running Intcode program looks", "loc 3 elif opcode == 8: if params[0] == params[1]:", "5 and the next parameter !=0, jump forward elif opcode", "params[j] = data[data[i+j+1]] except IndexError: continue else: try: params[j] =", "data[i+2] == Entry 2 data[i+3] == Entry 3 If Opcode", "'0': try: params[j] = data[data[i+j+1]] except IndexError: continue else: try:", "in data] f.close() return int_data def run_intcode(program, input_int): \"\"\" Takes", "= 8, 0 otherwise #test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] # 1 if", "at position given by third param. 
If Opcode == 99,", "answer1 = run_intcode(program, 1) #print(result1) print(\"Part 1: Answer is: \",", "else: print(\"Problem with the Program\") break return data, answer program", "the next parameter !=0, jump forward elif opcode == 5:", "sets of 4: data[i] == Parameter Mode + Opcode (last", "does nothing. If Opcode == 6 and entry 1 is", "range(2): if param_modes[j] == '0': try: params[j] = data[data[i+j+1]] except", "0 i += 4 # If the opcode is 8,", "[3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if input = 0, 1 otherwise #test_program10", "7, carry out less than comparison and store 1/0 at", "input = 0, 1 otherwise #test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0", "the opcode is 7, carry out less than comparison and", "and next parameter is 0, jump forward elif opcode ==", "params[0] * params[1] i += 4; # If opcode is", "# 0 if input = 0, 1 otherwise #test_program10 =", "with the Program\") break return data, answer program = read_data(\"day5input.txt\")", "If the opcode is 5 and the next parameter !=0,", "the opcode is 99, halt the intcode elif opcode ==", "are multiplied and stored at the index location of entry", "index location Parameter 1 -> Immediate mode - the entry", "Mode + Opcode (last two digits) data[i+1] == Entry 1", "[0, 0, 0] param_modes = ['', '', ''] i =", "== 1: data[data[i+3]] = params[0] + params[1] i += 4;", "i in data] f.close() return int_data def run_intcode(program, input_int): \"\"\"", "the left of the opcode, read left to right: Parameter", "+= 4 # If the opcode is 8, carry out", "@author: Paul \"\"\" def read_data(filename): \"\"\" Reads csv file into", "#test_program6 = [3,3,1108,-1,8,3,4,3,99] # 1 if input = 8, 0", "[] f = open(filename, 'r') for line in f: data", "the value of the opcode at index location = entry", "summed and stored at the index location of entry 3.", "store 0 at position given by third param. If Opcode", "for i in data] f.close() return int_data def run_intcode(program, input_int):", "50. 
If Opcode == 5 and entry 1 is !=", "* params[1] i += 4; # If opcode is 3,", "at loc 3 elif opcode == 8: if params[0] ==", "given by third param. If Opcode == 7 and entry", "answer2) #test_program = [1002,4,3,4,33] #test_program2 = [3,0,4,0,99] #test_program3 = [1101,100,-1,4,0]", "moves to the index stored at entry 2. Otherwise it", "data, answer program = read_data(\"day5input.txt\") #print(program) result1, answer1 = run_intcode(program,", "next parameter is 0, jump forward elif opcode == 6:", "['', '', ''] i = 0 while (i < len(program)):", "does nothing. If Opcode == 7 and entry 1> entry", "0 otherwise #test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if input =", "run int_code on. Returns list of ints after intcode program", "halt code\") break # If opcode is anything else something", "If opcode is 4, print out the input stored at", "multiply the relevant entries: elif opcode == 2: data[data[i+3]] =", "+= 2; # If opcode is 4, print out the", "digits to the left of the opcode, read left to", "!= 0, the intcode position moves to the index stored", "run_intcode(program, 1) #print(result1) print(\"Part 1: Answer is: \", answer1) result2,", "location of entry 3. If Opcode == 2, the value", "2. Otherwise it does nothing. If Opcode == 6 and", "value of the opcode at index location = entry 1", "#test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] # 1 if input = 8, 0", "0 while (i < len(program)): #print(\"i = \", i) #", "#test_program7 = [3,3,1107,-1,8,3,4,3,99] # 1 if input < 8, 0", "intcode postion moves to the index stored at entry 2.", "out equality comparison and store 1/0 at loc 3 elif", "for line in f: data += line.strip('\\n').split(',') int_data = [int(i)", "= params[0] * params[1] i += 4; # If opcode", "at the index location of entry 3. If Opcode ==", "Opcode == 1, the value of the opcode at index", "entry 3. 
If Opcode == 2, the value of the", "opcode is 7, carry out less than comparison and store", "opcode is 99, halt the intcode elif opcode == 99:", "< len(program)): #print(\"i = \", i) # Determine Opcode and", "= open(filename, 'r') for line in f: data += line.strip('\\n').split(',')", "Parameter Mode + Opcode (last two digits) data[i+1] == Entry", "to right: Parameter 0 -> Position mode - the entry", "multiplied and stored at the index location of entry 3.", "ints after intcode program has been run. Running Intcode program", "1 -> Immediate mode - the entry is treated as", "# If opcode is anything else something has gone wrong!", "the program is completed and will stop running. Parameters are", "result2, answer2 = run_intcode(program, 5) #print(result2) print(\"Part 2: Answer is:", "elif opcode == 7: if params[0] < params[1]: data[data[i+3]] =", "= [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if input = 0, 1 otherwise", "0] param_modes = ['', '', ''] i = 0 while", "next parameter !=0, jump forward elif opcode == 5: if", "otherwise #test_program6 = [3,3,1108,-1,8,3,4,3,99] # 1 if input = 8,", "+= 4; # If opcode is 3, store input value", "intcode program has been run. Running Intcode program looks reads", "try: params[j] = data[data[i+j+1]] except IndexError: continue else: try: params[j]", "if params[0] == 0: i = params[1] else: i +=", "3. If Opcode == 3, the the single integer (input)", "== 99, the program is completed and will stop running.", "param_modes[2] = opcode_str[0] #print(opcode_str) for j in range(2): if param_modes[j]", "index location = entry 1 and 2 in the program", "store input value at required location. elif opcode == 3:", "8, 0 otherwise #test_program6 = [3,3,1108,-1,8,3,4,3,99] # 1 if input", "[1002,4,3,4,33] #test_program2 = [3,0,4,0,99] #test_program3 = [1101,100,-1,4,0] #test_program4 = [3,9,8,9,10,9,4,9,99,-1,8]", "param. 
If Opcode == 7 and entry 1 = entry", "1> entry 2, store 1 in position given by third", "params[0] + params[1] i += 4; # If opcode is", "= ['', '', ''] i = 0 while (i <", "1 else: data[data[i+3]] = 0 i += 4 # If", "print(\"Part 1: Answer is: \", answer1) result2, answer2 = run_intcode(program,", "\"\"\" data = [] f = open(filename, 'r') for line", "If Opcode == 7 and entry 1 = entry 2,", "0, 0] param_modes = ['', '', ''] i = 0", "8, 0 otherwise #test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] # 1 if input", "as an index location Parameter 1 -> Immediate mode -", "left of the opcode, read left to right: Parameter 0", "If opcode is 3, store input value at required location.", "2; # If opcode is 4, print out the input", "location = entry 1 and 2 in the program are", "'', ''] i = 0 while (i < len(program)): #print(\"i", "else: try: params[j] = data[i+j+1] except IndexError: continue #print(params, param_modes)", "is 2, multiply the relevant entries: elif opcode == 2:", "reads in the integers sequentially in sets of 4: data[i]", "\", i) # Determine Opcode and parameter codes: opcode_str =", "2 11:06:59 2019 @author: Paul \"\"\" def read_data(filename): \"\"\" Reads", "[3,9,8,9,10,9,4,9,99,-1,8] # 1 if input = 8, 0 otherwise #test_program5", "3. If Opcode == 2, the value of the opcode", "utf-8 -*- \"\"\" Created on Mon Dec 2 11:06:59 2019", "4 # If the opcode is 99, halt the intcode", "If the opcode is 99, halt the intcode elif opcode", "8, carry out equality comparison and store 1/0 at loc", "1 and 2 in the program are multiplied and stored", "1) #print(result1) print(\"Part 1: Answer is: \", answer1) result2, answer2", "i += 4 # If the opcode is 8, carry", "otherwise #test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if input = 0,", "0 if input = 0, 1 otherwise #test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1]", "in sets of 4: data[i] == Parameter Mode + Opcode", "location. 
elif opcode == 3: data[data[i+1]] = input_int i +=", "out the input stored at specified location. elif opcode ==", "required location. elif opcode == 3: data[data[i+1]] = input_int i", "= entry 1 and 2 in the program are summed", "right: Parameter 0 -> Position mode - the entry is", "location Parameter 1 -> Immediate mode - the entry is", "99, halt the intcode elif opcode == 99: print(\"Program ended", "the index location of entry 3. If Opcode == 3,", "is: \", answer1) result2, answer2 = run_intcode(program, 5) #print(result2) print(\"Part", "IndexError: continue else: try: params[j] = data[i+j+1] except IndexError: continue", "store 1 in position given by third param, otherwise store", "intcode position moves to the index stored at entry 2.", "anything else something has gone wrong! else: print(\"Problem with the", "+= 3 # If the opcode is 6 and next", "If Opcode == 4, the program outputs the value of", "Entry 2 data[i+3] == Entry 3 If Opcode == 1,", "otherwise store 0 at position given by third param. If", "opcode is 6 and next parameter is 0, jump forward", "the intcode elif opcode == 99: print(\"Program ended by halt", "params[j] = data[i+j+1] except IndexError: continue #print(params, param_modes) # If", "If opcode is 2, multiply the relevant entries: elif opcode", "== 7 and entry 1> entry 2, store 1 in", "and will stop running. Parameters are digits to the left", "5: if params[0] != 0: i = params[1] else: i", "halt the intcode elif opcode == 99: print(\"Program ended by", "= [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99] # 999 if input < 8,", "== 3, the the single integer (input) is saved to", "3 # If the opcode is 7, carry out less", "completed and will stop running. 
Parameters are digits to the", "= opcode_str[0] #print(opcode_str) for j in range(2): if param_modes[j] ==", "in the program are multiplied and stored at the index", "5 and entry 1 is != 0, the intcode position", "and entry 1 is != 0, the intcode position moves", "#test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if input = 0, 1", "file into a list, and converts to ints \"\"\" data", "answer program = read_data(\"day5input.txt\") #print(program) result1, answer1 = run_intcode(program, 1)", "answer = -1 params = [0, 0, 0] param_modes =", "= data[i+j+1] except IndexError: continue #print(params, param_modes) # If opcode", "1, add relevant entries: if opcode == 1: data[data[i+3]] =", "otherwise #test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] # 1 if input < 8,", "read left to right: Parameter 0 -> Position mode -", "line.strip('\\n').split(',') int_data = [int(i) for i in data] f.close() return", "at specified location. elif opcode == 4: answer = data[data[i+1]]", "\"\"\" Reads csv file into a list, and converts to", "in f: data += line.strip('\\n').split(',') int_data = [int(i) for i", "3 elif opcode == 8: if params[0] == params[1]: data[data[i+3]]", "f = open(filename, 'r') for line in f: data +=", "Entry 1 data[i+2] == Entry 2 data[i+3] == Entry 3", "-1 params = [0, 0, 0] param_modes = ['', '',", "data[data[i+1]]) i += 2; # If the opcode is 5", "is 0, the intcode postion moves to the index stored", "elif opcode == 2: data[data[i+3]] = params[0] * params[1] i", "store 1/0 at loc 3 elif opcode == 8: if", "index location of entry 3. 
If Opcode == 3, the", "param_modes[0] = opcode_str[2] param_modes[1] = opcode_str[1] param_modes[2] = opcode_str[0] #print(opcode_str)", "#print(params, param_modes) # If opcode is 1, add relevant entries:", "2019 @author: Paul \"\"\" def read_data(filename): \"\"\" Reads csv file", "Paul \"\"\" def read_data(filename): \"\"\" Reads csv file into a", "continue else: try: params[j] = data[i+j+1] except IndexError: continue #print(params,", "data += line.strip('\\n').split(',') int_data = [int(i) for i in data]", "input = 8, 0 otherwise #test_program7 = [3,3,1107,-1,8,3,4,3,99] # 1", "stored at specified location. elif opcode == 4: answer =", "program are multiplied and stored at the index location of", "opcode, read left to right: Parameter 0 -> Position mode", "0 at position given by third param. If Opcode ==", "to the position given by index 1. If Opcode ==", "1 in position given by third param, otherwise store 0", "position given by index 1. If Opcode == 4, the", "opcode == 1: data[data[i+3]] = params[0] + params[1] i +=", "data[data[i+3]] = 1 else: data[data[i+3]] = 0 i += 4", "input_int): \"\"\" Takes data, list of ints to run int_code", "the program outputs the value of its only parameter. E.g.", "elif opcode == 5: if params[0] != 0: i =", "in the integers sequentially in sets of 4: data[i] ==", "input < 8, 1000 if input = 8, 1001 if", "Opcode == 4, the program outputs the value of its", "i += 4; # If opcode is 2, multiply the", "Opcode == 6 and entry 1 is 0, the intcode", "= 1 else: data[data[i+3]] = 0 i += 4 #", "opcode_str[1] param_modes[2] = opcode_str[0] #print(opcode_str) for j in range(2): if", "and store 1/0 at loc 3 elif opcode == 8:", "\", answer2) #test_program = [1002,4,3,4,33] #test_program2 = [3,0,4,0,99] #test_program3 =", "entry is treated as a value \"\"\" data = program[:]", "by third param. 
If Opcode == 7 and entry 1", "entries: elif opcode == 2: data[data[i+3]] = params[0] * params[1]", "i += 4; # If opcode is 3, store input", "# If the opcode is 6 and next parameter is", "if opcode == 1: data[data[i+3]] = params[0] + params[1] i", "# If opcode is 3, store input value at required", "the entry is treated as a value \"\"\" data =", "0: i = params[1] else: i += 3 # If", "carry out less than comparison and store 1/0 at loc", "to run int_code on. Returns list of ints after intcode", "i += 3 # If the opcode is 7, carry", "result1, answer1 = run_intcode(program, 1) #print(result1) print(\"Part 1: Answer is:", "= [1101,100,-1,4,0] #test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] # 1 if input =", "if input < 8, 1000 if input = 8, 1001", "< 8, 1000 if input = 8, 1001 if input", "specified location. elif opcode == 4: answer = data[data[i+1]] print(\"Program", "relevant entries: elif opcode == 2: data[data[i+3]] = params[0] *", "postion moves to the index stored at entry 2. Otherwise", "the intcode position moves to the index stored at entry", "nothing. If Opcode == 6 and entry 1 is 0,", "[3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99] # 999 if input < 8, 1000", "except IndexError: continue #print(params, param_modes) # If opcode is 1,", "== Entry 1 data[i+2] == Entry 2 data[i+3] == Entry", "2: data[data[i+3]] = params[0] * params[1] i += 4; #", "the intcode postion moves to the index stored at entry", "- the entry is treated as a value \"\"\" data", "would output the value at address 50. If Opcode ==", "= opcode_str[2] param_modes[1] = opcode_str[1] param_modes[2] = opcode_str[0] #print(opcode_str) for", "jump forward elif opcode == 6: if params[0] == 0:", "data = [] f = open(filename, 'r') for line in", "wrong! else: print(\"Problem with the Program\") break return data, answer", "index location of entry 3. 
If Opcode == 2, the", "2 in the program are summed and stored at the", "Returns list of ints after intcode program has been run.", "if params[0] < params[1]: data[data[i+3]] = 1 else: data[data[i+3]] =", "!= 0: i = params[1] else: i += 3 #", "carry out equality comparison and store 1/0 at loc 3", "[3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if input = 0, 1 otherwise #test_program9", "If the opcode is 7, carry out less than comparison", "of entry 3. If Opcode == 3, the the single", "param. If Opcode == 99, the program is completed and", "i = params[1] else: i += 3 # If the", "elif opcode == 3: data[data[i+1]] = input_int i += 2;", "opcode == 8: if params[0] == params[1]: data[data[i+3]] = 1", "and entry 1> entry 2, store 1 in position given", "read_data(filename): \"\"\" Reads csv file into a list, and converts", "open(filename, 'r') for line in f: data += line.strip('\\n').split(',') int_data", "6 and entry 1 is 0, the intcode postion moves", "Parameters are digits to the left of the opcode, read", "by index 1. If Opcode == 4, the program outputs", "== 0: i = params[1] else: i += 3 #", "999 if input < 8, 1000 if input = 8,", "5) #print(result2) print(\"Part 2: Answer is: \", answer2) #test_program =", "print out the input stored at specified location. elif opcode", "of its only parameter. E.g. 4,50 would output the value", "as a value \"\"\" data = program[:] answer = -1", "#test_program = [1002,4,3,4,33] #test_program2 = [3,0,4,0,99] #test_program3 = [1101,100,-1,4,0] #test_program4", "if input = 8, 0 otherwise #test_program7 = [3,3,1107,-1,8,3,4,3,99] #", "left to right: Parameter 0 -> Position mode - the", "7 and entry 1 = entry 2, store 1 in", "7: if params[0] < params[1]: data[data[i+3]] = 1 else: data[data[i+3]]", "of ints after intcode program has been run. 
Running Intcode", "the the single integer (input) is saved to the position", "Opcode == 2, the value of the opcode at index", "in range(2): if param_modes[j] == '0': try: params[j] = data[data[i+j+1]]", "else something has gone wrong! else: print(\"Problem with the Program\")", "try: params[j] = data[i+j+1] except IndexError: continue #print(params, param_modes) #", "read_data(\"day5input.txt\") #print(program) result1, answer1 = run_intcode(program, 1) #print(result1) print(\"Part 1:", "params[1] i += 4; # If opcode is 3, store", "list of ints after intcode program has been run. Running", "Position mode - the entry is treated as an index", "== Parameter Mode + Opcode (last two digits) data[i+1] ==", "= params[1] else: i += 3 # If the opcode", "the entry is treated as an index location Parameter 1", "1 data[i+2] == Entry 2 data[i+3] == Entry 3 If", "== 4: answer = data[data[i+1]] print(\"Program output: \", data[data[i+1]]) i", "is: \", answer2) #test_program = [1002,4,3,4,33] #test_program2 = [3,0,4,0,99] #test_program3", "the Program\") break return data, answer program = read_data(\"day5input.txt\") #print(program)", "opcode == 3: data[data[i+1]] = input_int i += 2; #", "opcode = int(opcode_str[3:]) param_modes[0] = opcode_str[2] param_modes[1] = opcode_str[1] param_modes[2]", "the opcode is 8, carry out equality comparison and store", "#print(result2) print(\"Part 2: Answer is: \", answer2) #test_program = [1002,4,3,4,33]", "== 3: data[data[i+1]] = input_int i += 2; # If", "int_data = [int(i) for i in data] f.close() return int_data", "# -*- coding: utf-8 -*- \"\"\" Created on Mon Dec", "ints to run int_code on. 
Returns list of ints after", "1/0 at loc 3 elif opcode == 8: if params[0]", "# 1 if input = 8, 0 otherwise #test_program5 =", "entry is treated as an index location Parameter 1 ->", "+ Opcode (last two digits) data[i+1] == Entry 1 data[i+2]", "answer2 = run_intcode(program, 5) #print(result2) print(\"Part 2: Answer is: \",", "opcode == 4: answer = data[data[i+1]] print(\"Program output: \", data[data[i+1]])", "entries: if opcode == 1: data[data[i+3]] = params[0] + params[1]", "i += 2; # If opcode is 4, print out", "entry 1 is 0, the intcode postion moves to the", "Program\") break return data, answer program = read_data(\"day5input.txt\") #print(program) result1,", "stop running. Parameters are digits to the left of the", "break # If opcode is anything else something has gone", "4, print out the input stored at specified location. elif", "8, 0 otherwise #test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if input", "[int(i) for i in data] f.close() return int_data def run_intcode(program,", "def read_data(filename): \"\"\" Reads csv file into a list, and", "coding: utf-8 -*- \"\"\" Created on Mon Dec 2 11:06:59", "return int_data def run_intcode(program, input_int): \"\"\" Takes data, list of", "if params[0] == params[1]: data[data[i+3]] = 1 else: data[data[i+3]] =", "Answer is: \", answer2) #test_program = [1002,4,3,4,33] #test_program2 = [3,0,4,0,99]", "entry 3. If Opcode == 3, the the single integer", "the value of its only parameter. E.g. 
4,50 would output", "len(program)): #print(\"i = \", i) # Determine Opcode and parameter", "relevant entries: if opcode == 1: data[data[i+3]] = params[0] +", "3, the the single integer (input) is saved to the", "answer = data[data[i+1]] print(\"Program output: \", data[data[i+1]]) i += 2;", "6: if params[0] == 0: i = params[1] else: i", "the program are summed and stored at the index location", "'r') for line in f: data += line.strip('\\n').split(',') int_data =", "== '0': try: params[j] = data[data[i+j+1]] except IndexError: continue else:", "data[data[i+3]] = params[0] + params[1] i += 4; # If", "gone wrong! else: print(\"Problem with the Program\") break return data,", "[3,3,1107,-1,8,3,4,3,99] # 1 if input < 8, 0 otherwise #test_program8", "int_data def run_intcode(program, input_int): \"\"\" Takes data, list of ints", "third param. If Opcode == 99, the program is completed", "1: Answer is: \", answer1) result2, answer2 = run_intcode(program, 5)", "the value at address 50. If Opcode == 5 and", "a list, and converts to ints \"\"\" data = []", "output: \", data[data[i+1]]) i += 2; # If the opcode", "= run_intcode(program, 1) #print(result1) print(\"Part 1: Answer is: \", answer1)", "params[0] != 0: i = params[1] else: i += 3", "list, and converts to ints \"\"\" data = [] f", "input = 8, 0 otherwise #test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] # 1", "entry 1 is != 0, the intcode position moves to", "If Opcode == 7 and entry 1> entry 2, store", "int_code on. Returns list of ints after intcode program has", "break return data, answer program = read_data(\"day5input.txt\") #print(program) result1, answer1", "1, the value of the opcode at index location =", "data[data[i+3]] = 0 i += 4 # If the opcode", "E.g. 4,50 would output the value at address 50. If", "# 1 if input = 8, 0 otherwise #test_program7 =", "will stop running. 
Parameters are digits to the left of", "int(opcode_str[3:]) param_modes[0] = opcode_str[2] param_modes[1] = opcode_str[1] param_modes[2] = opcode_str[0]", "stored at the index location of entry 3. If Opcode", "loc 3 elif opcode == 7: if params[0] < params[1]:", "11:06:59 2019 @author: Paul \"\"\" def read_data(filename): \"\"\" Reads csv", "# If opcode is 4, print out the input stored", "4; # If opcode is 2, multiply the relevant entries:", "by halt code\") break # If opcode is anything else", "otherwise #test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99] # 999 if input", "Takes data, list of ints to run int_code on. Returns", "treated as a value \"\"\" data = program[:] answer =", "input < 8, 0 otherwise #test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0", "run_intcode(program, input_int): \"\"\" Takes data, list of ints to run", "two digits) data[i+1] == Entry 1 data[i+2] == Entry 2", "If Opcode == 3, the the single integer (input) is", "it does nothing. If Opcode == 6 and entry 1", "program is completed and will stop running. Parameters are digits", "if param_modes[j] == '0': try: params[j] = data[data[i+j+1]] except IndexError:", "has gone wrong! else: print(\"Problem with the Program\") break return", "params[1] i += 4; # If opcode is 2, multiply", "= [3,0,4,0,99] #test_program3 = [1101,100,-1,4,0] #test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] # 1", "8, 0 otherwise #test_program7 = [3,3,1107,-1,8,3,4,3,99] # 1 if input", "program has been run. Running Intcode program looks reads in", "Opcode (last two digits) data[i+1] == Entry 1 data[i+2] ==", "the program are multiplied and stored at the index location", "run. 
Running Intcode program looks reads in the integers sequentially", "#test_program2 = [3,0,4,0,99] #test_program3 = [1101,100,-1,4,0] #test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] #", "#test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if input = 0, 1", "line in f: data += line.strip('\\n').split(',') int_data = [int(i) for", "else: i += 3 # If the opcode is 7,", "1 otherwise #test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99] # 999 if", "is saved to the position given by index 1. If", "list of ints to run int_code on. Returns list of", "the position given by index 1. If Opcode == 4,", "if params[0] != 0: i = params[1] else: i +=", "elif opcode == 99: print(\"Program ended by halt code\") break", "= read_data(\"day5input.txt\") #print(program) result1, answer1 = run_intcode(program, 1) #print(result1) print(\"Part", "otherwise #test_program7 = [3,3,1107,-1,8,3,4,3,99] # 1 if input < 8,", "single integer (input) is saved to the position given by", "is 3, store input value at required location. elif opcode", "== 5: if params[0] != 0: i = params[1] else:", "0, the intcode position moves to the index stored at", "+= 4; # If opcode is 2, multiply the relevant", "the index stored at entry 2. Otherwise it does nothing.", "into a list, and converts to ints \"\"\" data =", "the opcode is 5 and the next parameter !=0, jump", "i += 3 # If the opcode is 6 and", "to ints \"\"\" data = [] f = open(filename, 'r')", "looks reads in the integers sequentially in sets of 4:", "equality comparison and store 1/0 at loc 3 elif opcode", "position given by third param. If Opcode == 7 and", "at required location. 
elif opcode == 3: data[data[i+1]] = input_int", "run_intcode(program, 5) #print(result2) print(\"Part 2: Answer is: \", answer2) #test_program", "- the entry is treated as an index location Parameter", "continue #print(params, param_modes) # If opcode is 1, add relevant", "f: data += line.strip('\\n').split(',') int_data = [int(i) for i in", "is != 0, the intcode position moves to the index", "== params[1]: data[data[i+3]] = 1 else: data[data[i+3]] = 0 i", "program looks reads in the integers sequentially in sets of", "data[data[i+1]] = input_int i += 2; # If opcode is", "if input = 0, 1 otherwise #test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] #", "#1105,1,46,98,99] # 999 if input < 8, 1000 if input", "= [3,9,7,9,10,9,4,9,99,-1,8] # 1 if input < 8, 0 otherwise", "and 2 in the program are multiplied and stored at", "0, 1 otherwise #test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99] # 999", "program are summed and stored at the index location of", "a value \"\"\" data = program[:] answer = -1 params", "its only parameter. E.g. 4,50 would output the value at", "ints \"\"\" data = [] f = open(filename, 'r') for", "params[0] == params[1]: data[data[i+3]] = 1 else: data[data[i+3]] = 0", "(last two digits) data[i+1] == Entry 1 data[i+2] == Entry", "i += 4 # If the opcode is 99, halt", "integer (input) is saved to the position given by index", "0 -> Position mode - the entry is treated as", "are summed and stored at the index location of entry", "i) # Determine Opcode and parameter codes: opcode_str = \"{:0>5d}\".format(data[i])", "comparison and store 1/0 at loc 3 elif opcode ==", "code\") break # If opcode is anything else something has", "given by index 1. 
If Opcode == 4, the program", "= \", i) # Determine Opcode and parameter codes: opcode_str", "= data[data[i+j+1]] except IndexError: continue else: try: params[j] = data[i+j+1]", "given by third param. If Opcode == 99, the program", "#print(opcode_str) for j in range(2): if param_modes[j] == '0': try:", "if input = 8, 0 otherwise #test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] #", "forward elif opcode == 6: if params[0] == 0: i", "# If opcode is 2, multiply the relevant entries: elif", "print(\"Problem with the Program\") break return data, answer program =", "input < 8, 0 otherwise #test_program6 = [3,3,1108,-1,8,3,4,3,99] # 1", "== 99: print(\"Program ended by halt code\") break # If", "nothing. If Opcode == 7 and entry 1> entry 2,", "params[1]: data[data[i+3]] = 1 else: data[data[i+3]] = 0 i +=", "Opcode == 99, the program is completed and will stop", "input stored at specified location. elif opcode == 4: answer", "2; # If the opcode is 5 and the next", "+= line.strip('\\n').split(',') int_data = [int(i) for i in data] f.close()", "2: Answer is: \", answer2) #test_program = [1002,4,3,4,33] #test_program2 =", "elif opcode == 6: if params[0] == 0: i =", "== Entry 2 data[i+3] == Entry 3 If Opcode ==", "== 7: if params[0] < params[1]: data[data[i+3]] = 1 else:", "4, the program outputs the value of its only parameter.", "param_modes[j] == '0': try: params[j] = data[data[i+j+1]] except IndexError: continue", "of entry 3. 
If Opcode == 2, the value of", "of 4: data[i] == Parameter Mode + Opcode (last two", "i += 2; # If the opcode is 5 and", "1 if input = 8, 0 otherwise #test_program5 = [3,9,7,9,10,9,4,9,99,-1,8]", "#print(\"i = \", i) # Determine Opcode and parameter codes:", "parameter !=0, jump forward elif opcode == 5: if params[0]", "params[0] < params[1]: data[data[i+3]] = 1 else: data[data[i+3]] = 0", "= 0, 1 otherwise #test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99] #", "2, store 1 in position given by third param, otherwise", "== 2, the value of the opcode at index location", "entry 2. Otherwise it does nothing. If Opcode == 7", "mode - the entry is treated as an index location", "# Determine Opcode and parameter codes: opcode_str = \"{:0>5d}\".format(data[i]) opcode", "Determine Opcode and parameter codes: opcode_str = \"{:0>5d}\".format(data[i]) opcode =", "If Opcode == 1, the value of the opcode at", "4; # If opcode is 3, store input value at", "opcode_str[0] #print(opcode_str) for j in range(2): if param_modes[j] == '0':", "data, list of ints to run int_code on. Returns list", "while (i < len(program)): #print(\"i = \", i) # Determine", "by third param. 
If Opcode == 99, the program is", "parameter codes: opcode_str = \"{:0>5d}\".format(data[i]) opcode = int(opcode_str[3:]) param_modes[0] =", "= [0, 0, 0] param_modes = ['', '', ''] i", "def run_intcode(program, input_int): \"\"\" Takes data, list of ints to", "Parameter 1 -> Immediate mode - the entry is treated", "store 1/0 at loc 3 elif opcode == 7: if", "input_int i += 2; # If opcode is 4, print", "if input < 8, 0 otherwise #test_program6 = [3,3,1108,-1,8,3,4,3,99] #", "is 0, jump forward elif opcode == 6: if params[0]", "# 0 if input = 0, 1 otherwise #test_program9 =", "= params[0] + params[1] i += 4; # If opcode", "+= 4 # If the opcode is 99, halt the", "opcode is 2, multiply the relevant entries: elif opcode ==", "3 # If the opcode is 6 and next parameter", "== 7 and entry 1 = entry 2, store 1", "4: answer = data[data[i+1]] print(\"Program output: \", data[data[i+1]]) i +=", "7 and entry 1> entry 2, store 1 in position", "data[i] == Parameter Mode + Opcode (last two digits) data[i+1]", "by third param, otherwise store 0 at position given by", "elif opcode == 8: if params[0] == params[1]: data[data[i+3]] =", "data[i+1] == Entry 1 data[i+2] == Entry 2 data[i+3] ==", "been run. Running Intcode program looks reads in the integers", "opcode is anything else something has gone wrong! else: print(\"Problem", "param_modes = ['', '', ''] i = 0 while (i", "99: print(\"Program ended by halt code\") break # If opcode", "4 # If the opcode is 8, carry out equality", "csv file into a list, and converts to ints \"\"\"", "print(\"Part 2: Answer is: \", answer2) #test_program = [1002,4,3,4,33] #test_program2", "input = 0, 1 otherwise #test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99]", "opcode == 7: if params[0] < params[1]: data[data[i+3]] = 1", "== 6: if params[0] == 0: i = params[1] else:", "third param. 
If Opcode == 7 and entry 1 =", "of ints to run int_code on. Returns list of ints", "sequentially in sets of 4: data[i] == Parameter Mode +", "[3,9,7,9,10,9,4,9,99,-1,8] # 1 if input < 8, 0 otherwise #test_program6", "and store 1/0 at loc 3 elif opcode == 7:", "i = 0 while (i < len(program)): #print(\"i = \",", "data[data[i+3]] = params[0] * params[1] i += 4; # If", "Answer is: \", answer1) result2, answer2 = run_intcode(program, 5) #print(result2)", "and entry 1 = entry 2, store 1 in position", "forward elif opcode == 5: if params[0] != 0: i", "opcode at index location = entry 1 and 2 in", "output the value at address 50. If Opcode == 5", "than comparison and store 1/0 at loc 3 elif opcode", "param_modes) # If opcode is 1, add relevant entries: if", "== 5 and entry 1 is != 0, the intcode", "1 = entry 2, store 1 in position given by", "opcode is 1, add relevant entries: if opcode == 1:", "to the index stored at entry 2. Otherwise it does", "opcode == 99: print(\"Program ended by halt code\") break #", "something has gone wrong! else: print(\"Problem with the Program\") break", "is 7, carry out less than comparison and store 1/0", "= [1002,4,3,4,33] #test_program2 = [3,0,4,0,99] #test_program3 = [1101,100,-1,4,0] #test_program4 =", "intcode elif opcode == 99: print(\"Program ended by halt code\")", "is treated as a value \"\"\" data = program[:] answer", "data] f.close() return int_data def run_intcode(program, input_int): \"\"\" Takes data,", "Intcode program looks reads in the integers sequentially in sets", "and the next parameter !=0, jump forward elif opcode ==", "= -1 params = [0, 0, 0] param_modes = ['',", "# If the opcode is 7, carry out less than", "on. 
Returns list of ints after intcode program has been", "entry 1 = entry 2, store 1 in position given", "in the program are summed and stored at the index", "an index location Parameter 1 -> Immediate mode - the", "print(\"Program output: \", data[data[i+1]]) i += 2; # If the", "= 0, 1 otherwise #test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if", "Mon Dec 2 11:06:59 2019 @author: Paul \"\"\" def read_data(filename):", "data[i+j+1] except IndexError: continue #print(params, param_modes) # If opcode is", "< 8, 0 otherwise #test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if", "# If the opcode is 99, halt the intcode elif", "location. elif opcode == 4: answer = data[data[i+1]] print(\"Program output:", "out less than comparison and store 1/0 at loc 3", "entry 1> entry 2, store 1 in position given by", "in position given by third param, otherwise store 0 at", "1 if input < 8, 0 otherwise #test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9]", "opcode is 5 and the next parameter !=0, jump forward", "1 is != 0, the intcode position moves to the", "8: if params[0] == params[1]: data[data[i+3]] = 1 else: data[data[i+3]]", "If Opcode == 99, the program is completed and will", "mode - the entry is treated as a value \"\"\"", "0 otherwise #test_program7 = [3,3,1107,-1,8,3,4,3,99] # 1 if input <", "params[0] == 0: i = params[1] else: i += 3", "on Mon Dec 2 11:06:59 2019 @author: Paul \"\"\" def", "= 0 i += 4 # If the opcode is", "for j in range(2): if param_modes[j] == '0': try: params[j]", "= data[data[i+1]] print(\"Program output: \", data[data[i+1]]) i += 2; #", "= [3,3,1107,-1,8,3,4,3,99] # 1 if input < 8, 0 otherwise", "index 1. If Opcode == 4, the program outputs the", "stored at entry 2. Otherwise it does nothing. 
If Opcode", "= [int(i) for i in data] f.close() return int_data def", "program = read_data(\"day5input.txt\") #print(program) result1, answer1 = run_intcode(program, 1) #print(result1)", "else: i += 3 # If the opcode is 6", "\"\"\" def read_data(filename): \"\"\" Reads csv file into a list,", "position given by third param, otherwise store 0 at position", "param_modes[1] = opcode_str[1] param_modes[2] = opcode_str[0] #print(opcode_str) for j in", "== 8: if params[0] == params[1]: data[data[i+3]] = 1 else:", "== 4, the program outputs the value of its only", "and entry 1 is 0, the intcode postion moves to", "(i < len(program)): #print(\"i = \", i) # Determine Opcode", "outputs the value of its only parameter. E.g. 4,50 would", "opcode == 5: if params[0] != 0: i = params[1]", "[1101,100,-1,4,0] #test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] # 1 if input = 8,", "0 otherwise #test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] # 1 if input <", "6 and next parameter is 0, jump forward elif opcode", "of the opcode, read left to right: Parameter 0 ->", "If the opcode is 8, carry out equality comparison and", "of the opcode at index location = entry 1 and", "#test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] # 1 if input < 8, 0", "answer1) result2, answer2 = run_intcode(program, 5) #print(result2) print(\"Part 2: Answer", "0, 1 otherwise #test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if input", "= opcode_str[1] param_modes[2] = opcode_str[0] #print(opcode_str) for j in range(2):", "Opcode and parameter codes: opcode_str = \"{:0>5d}\".format(data[i]) opcode = int(opcode_str[3:])", "= 0 while (i < len(program)): #print(\"i = \", i)", "# 999 if input < 8, 1000 if input =", "Otherwise it does nothing. 
If Opcode == 6 and entry", "and parameter codes: opcode_str = \"{:0>5d}\".format(data[i]) opcode = int(opcode_str[3:]) param_modes[0]", "entry 2, store 1 in position given by third param,", "Opcode == 7 and entry 1 = entry 2, store", "If the opcode is 6 and next parameter is 0,", "is 6 and next parameter is 0, jump forward elif", "1 otherwise #test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if input =", "data[data[i+1]] print(\"Program output: \", data[data[i+1]]) i += 2; # If", "location of entry 3. If Opcode == 3, the the", "If Opcode == 6 and entry 1 is 0, the", "at position given by third param. If Opcode == 7", "-> Position mode - the entry is treated as an", "= \"{:0>5d}\".format(data[i]) opcode = int(opcode_str[3:]) param_modes[0] = opcode_str[2] param_modes[1] =", "the input stored at specified location. elif opcode == 4:", "#print(result1) print(\"Part 1: Answer is: \", answer1) result2, answer2 =", "2, multiply the relevant entries: elif opcode == 2: data[data[i+3]]", "and stored at the index location of entry 3. If", "program[:] answer = -1 params = [0, 0, 0] param_modes", "''] i = 0 while (i < len(program)): #print(\"i =", "If opcode is 1, add relevant entries: if opcode ==", "# If the opcode is 5 and the next parameter", "= 8, 0 otherwise #test_program7 = [3,3,1107,-1,8,3,4,3,99] # 1 if", "Created on Mon Dec 2 11:06:59 2019 @author: Paul \"\"\"", "j in range(2): if param_modes[j] == '0': try: params[j] =", "(input) is saved to the position given by index 1.", "digits) data[i+1] == Entry 1 data[i+2] == Entry 2 data[i+3]", "index stored at entry 2. Otherwise it does nothing. If", "add relevant entries: if opcode == 1: data[data[i+3]] = params[0]", "data = program[:] answer = -1 params = [0, 0,", "\", data[data[i+1]]) i += 2; # If the opcode is", "-*- \"\"\" Created on Mon Dec 2 11:06:59 2019 @author:", "Otherwise it does nothing. 
If Opcode == 7 and entry", "1: data[data[i+3]] = params[0] + params[1] i += 4; #", "= entry 2, store 1 in position given by third", "running. Parameters are digits to the left of the opcode,", "is 1, add relevant entries: if opcode == 1: data[data[i+3]]", "else: data[data[i+3]] = 0 i += 4 # If the", "value at required location. elif opcode == 3: data[data[i+1]] =", "# If the opcode is 8, carry out equality comparison", "= [3,9,8,9,10,9,4,9,99,-1,8] # 1 if input = 8, 0 otherwise", "entry 1 and 2 in the program are summed and", "only parameter. E.g. 4,50 would output the value at address", "otherwise #test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if input = 0,", "\", answer1) result2, answer2 = run_intcode(program, 5) #print(result2) print(\"Part 2:", "2, the value of the opcode at index location =", "IndexError: continue #print(params, param_modes) # If opcode is 1, add", "[3,0,4,0,99] #test_program3 = [1101,100,-1,4,0] #test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] # 1 if", "is treated as an index location Parameter 1 -> Immediate", "is 99, halt the intcode elif opcode == 99: print(\"Program", "2. Otherwise it does nothing. If Opcode == 7 and", "elif opcode == 4: answer = data[data[i+1]] print(\"Program output: \",", "opcode_str[2] param_modes[1] = opcode_str[1] param_modes[2] = opcode_str[0] #print(opcode_str) for j", "3, store input value at required location. elif opcode ==", "# 1 if input < 8, 0 otherwise #test_program8 =", "#36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99] # 999 if input < 8, 1000 if", "return data, answer program = read_data(\"day5input.txt\") #print(program) result1, answer1 =", "Parameter 0 -> Position mode - the entry is treated", "entry 2. Otherwise it does nothing. 
If Opcode == 6", "+ params[1] i += 4; # If opcode is 2,", "= [3,3,1108,-1,8,3,4,3,99] # 1 if input = 8, 0 otherwise", "1 and 2 in the program are summed and stored", "if input < 8, 0 otherwise #test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] #", "0 if input = 0, 1 otherwise #test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0,", "0 otherwise #test_program6 = [3,3,1108,-1,8,3,4,3,99] # 1 if input =", "0, the intcode postion moves to the index stored at", "data[i+3] == Entry 3 If Opcode == 1, the value", "value of its only parameter. E.g. 4,50 would output the", "parameter. E.g. 4,50 would output the value at address 50.", "= program[:] answer = -1 params = [0, 0, 0]", "#test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0, #36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20, #1105,1,46,98,99] # 999 if input <", "= [] f = open(filename, 'r') for line in f:", "the relevant entries: elif opcode == 2: data[data[i+3]] = params[0]", "2 data[i+3] == Entry 3 If Opcode == 1, the", "\"\"\" Takes data, list of ints to run int_code on.", "# If opcode is 1, add relevant entries: if opcode", "0 i += 4 # If the opcode is 99,", "saved to the position given by index 1. If Opcode", "= input_int i += 2; # If opcode is 4,", "opcode is 4, print out the input stored at specified", "the opcode, read left to right: Parameter 0 -> Position", "1 if input < 8, 0 otherwise #test_program6 = [3,3,1108,-1,8,3,4,3,99]", "2 in the program are multiplied and stored at the", "params = [0, 0, 0] param_modes = ['', '', '']", "+= 3 # If the opcode is 7, carry out", "3 If Opcode == 1, the value of the opcode", "opcode is 3, store input value at required location. 
elif", "1000 if input = 8, 1001 if input > 8", "== 1, the value of the opcode at index location", "are digits to the left of the opcode, read left", "< 8, 0 otherwise #test_program6 = [3,3,1108,-1,8,3,4,3,99] # 1 if", "!=0, jump forward elif opcode == 5: if params[0] !=", "Reads csv file into a list, and converts to ints", "= int(opcode_str[3:]) param_modes[0] = opcode_str[2] param_modes[1] = opcode_str[1] param_modes[2] =", "value \"\"\" data = program[:] answer = -1 params =", "+= 2; # If the opcode is 5 and the", "it does nothing. If Opcode == 7 and entry 1>", "== 2: data[data[i+3]] = params[0] * params[1] i += 4;", "to the left of the opcode, read left to right:", "4,50 would output the value at address 50. If Opcode" ]
[ "CodeInstance(AbstractCodeInstance): user = models.ForeignKey(User) task_id = models.PositiveIntegerField() intensity = models.FloatField()", "= models.ForeignKey(Schema, related_name=\"codes\") code_type = models.IntegerField(default=0) def __unicode__(self): if self.description:", "return \"%s/%s (%d): %s\" % (self.schema_id, self.name, self.id, self.description) else:", "AbstractCodeInstance(models.Model): class Meta: abstract = True code = models.ForeignKey(Code) message", "same time but after the last message, for example. \"\"\"", "= models.IntegerField(default=0) def __unicode__(self): if self.description: return \"%s/%s (%d): %s\"", "name = models.CharField(max_length=100) full_name = models.CharField(max_length=250) email = models.CharField(max_length=250) def", "start, end): \"\"\" Get messages that are inclusively between the", "| models.Q(idx__lte=end.idx) before_last = models.Q(time__lte=end.time) & before_last else: before_last =", "\"\"\" if isinstance(start, Message): after_first = ~models.Q(session=start.session) | models.Q(idx__gte=start.idx) after_first", "= models.CharField(max_length=250) def __unicode__(self): return self.name class AbstractCodeInstance(models.Model): class Meta:", "idx = models.IntegerField() time = models.DateTimeField() type = models.IntegerField() participant", "codes = models.ManyToManyField(Code, through='CodeInstance') @classmethod def get_between(cls, start, end): \"\"\"", "example. \"\"\" if isinstance(start, Message): after_first = ~models.Q(session=start.session) | models.Q(idx__gte=start.idx)", "def __unicode__(self): return \"%d (%s - %s)\" % (self.id, str(self.started),", "models.PositiveIntegerField(default=0) schema = models.ForeignKey(Schema, related_name=\"codes\") code_type = models.IntegerField(default=0) def __unicode__(self):", "else: after_first = models.Q(time__gte=start) if isinstance(end, Message): before_last = ~models.Q(session=end.session)", "but after the last message, for example. 
\"\"\" if isinstance(start,", "def user_name(self): return self.participant.name @property def created_at(self): return self.time class", "User(models.Model): name = models.CharField(max_length=100) full_name = models.CharField(max_length=250) email = models.CharField(max_length=250)", "def __unicode__(self): return self.name class Message(models.Model): session = models.ForeignKey(Session) idx", "models.Q(time__gte=start) if isinstance(end, Message): before_last = ~models.Q(session=end.session) | models.Q(idx__lte=end.idx) before_last", "= models.TextField() active_instances = models.PositiveIntegerField(default=0) schema = models.ForeignKey(Schema, related_name=\"codes\") code_type", "Create your models here. class Schema(models.Model): name = models.CharField(max_length=200) description", "last message, for example. \"\"\" if isinstance(start, Message): after_first =", "return self.name class AbstractCodeInstance(models.Model): class Meta: abstract = True code", "models.TextField() active_instances = models.PositiveIntegerField(default=0) schema = models.ForeignKey(Schema, related_name=\"codes\") code_type =", "messages, meaning that you won't get messages at the same", "the same time but after the last message, for example.", "message, for example. \"\"\" if isinstance(start, Message): after_first = ~models.Q(session=start.session)", "for example. 
\"\"\" if isinstance(start, Message): after_first = ~models.Q(session=start.session) |", "models.Q(time__lte=end.time) & before_last else: before_last = models.Q(time__lte=end) return cls.objects.filter(after_first, before_last)", "str(self.ended)) class Participant(models.Model): name = models.CharField(max_length=100) description = models.TextField() def", "= ~models.Q(session=start.session) | models.Q(idx__gte=start.idx) after_first = models.Q(time__gte=start.time) & after_first else:", "description = models.TextField() active_instances = models.PositiveIntegerField(default=0) schema = models.ForeignKey(Schema, related_name=\"codes\")", "def __unicode__(self): if self.description: return \"%s/%s (%d): %s\" % (self.schema_id,", "models.ForeignKey(User) task_id = models.PositiveIntegerField() intensity = models.FloatField() flag = models.IntegerField()", "models.ForeignKey(Participant, related_name='messages') message = models.TextField() codes = models.ManyToManyField(Code, through='CodeInstance') @classmethod", "self.time class User(models.Model): name = models.CharField(max_length=100) full_name = models.CharField(max_length=250) email", "= models.CharField(max_length=250) email = models.CharField(max_length=250) def __unicode__(self): return self.name class", "email = models.CharField(max_length=250) def __unicode__(self): return self.name class AbstractCodeInstance(models.Model): class", "name = models.CharField(max_length=200) description = models.TextField() class Code(models.Model): name =", "meaning that you won't get messages at the same time", "else: return \"%s/%s (%d)\" % (self.schema_id, self.name, self.id) class DataSet(models.Model):", "class Code(models.Model): name = models.CharField(max_length=200) description = models.TextField() active_instances =", "class Participant(models.Model): name = models.CharField(max_length=100) description = models.TextField() def __unicode__(self):", "full_name = models.CharField(max_length=250) email = 
models.CharField(max_length=250) def __unicode__(self): return self.name", "description = models.TextField() def __unicode__(self): return self.name class Message(models.Model): session", "models.ForeignKey(Schema, related_name=\"codes\") code_type = models.IntegerField(default=0) def __unicode__(self): if self.description: return", "% (self.schema_id, self.name, self.id) class DataSet(models.Model): name = models.CharField(max_length=100) created", "end): \"\"\" Get messages that are inclusively between the two", "after_first = ~models.Q(session=start.session) | models.Q(idx__gte=start.idx) after_first = models.Q(time__gte=start.time) & after_first", "message = models.TextField() codes = models.ManyToManyField(Code, through='CodeInstance') @classmethod def get_between(cls,", "%s\" % (self.schema_id, self.name, self.id, self.description) else: return \"%s/%s (%d)\"", "models.IntegerField() participant = models.ForeignKey(Participant, related_name='messages') message = models.TextField() codes =", "= models.DateTimeField() def __unicode__(self): return \"%d (%s - %s)\" %", "Takes into account the exact ordering of messages, meaning that", "cls.objects.filter(after_first, before_last) @property def text(self): return self.message @property def user_name(self):", "text(self): return self.message @property def user_name(self): return self.participant.name @property def", "Schema(models.Model): name = models.CharField(max_length=200) description = models.TextField() class Code(models.Model): name", "models.CharField(max_length=250) email = models.CharField(max_length=250) def __unicode__(self): return self.name class AbstractCodeInstance(models.Model):", "else: before_last = models.Q(time__lte=end) return cls.objects.filter(after_first, before_last) @property def text(self):", "models.DateTimeField() type = models.IntegerField() participant = models.ForeignKey(Participant, related_name='messages') message =", "models.Q(time__lte=end) return 
cls.objects.filter(after_first, before_last) @property def text(self): return self.message @property", "added = models.DateTimeField() class CodeInstance(AbstractCodeInstance): user = models.ForeignKey(User) task_id =", "from django.db import models # Create your models here. class", "= models.CharField(max_length=100) created = models.DateTimeField() class Session(models.Model): set = models.ForeignKey(DataSet)", "self.name, self.id) class DataSet(models.Model): name = models.CharField(max_length=100) created = models.DateTimeField()", "models.CharField(max_length=100) description = models.TextField() def __unicode__(self): return self.name class Message(models.Model):", "inclusively between the two messages, or two dates. Takes into", "return \"%s/%s (%d)\" % (self.schema_id, self.name, self.id) class DataSet(models.Model): name", "messages that are inclusively between the two messages, or two", "= models.CharField(max_length=200) description = models.TextField() class Code(models.Model): name = models.CharField(max_length=200)", "after the last message, for example. 
\"\"\" if isinstance(start, Message):", "- %s)\" % (self.id, str(self.started), str(self.ended)) class Participant(models.Model): name =", "models.CharField(max_length=200) description = models.TextField() class Code(models.Model): name = models.CharField(max_length=200) description", "& before_last else: before_last = models.Q(time__lte=end) return cls.objects.filter(after_first, before_last) @property", "self.participant.name @property def created_at(self): return self.time class User(models.Model): name =", "~models.Q(session=end.session) | models.Q(idx__lte=end.idx) before_last = models.Q(time__lte=end.time) & before_last else: before_last", "models.TextField() codes = models.ManyToManyField(Code, through='CodeInstance') @classmethod def get_between(cls, start, end):", "models.ForeignKey(DataSet) started = models.DateTimeField() ended = models.DateTimeField() def __unicode__(self): return", "time = models.DateTimeField() type = models.IntegerField() participant = models.ForeignKey(Participant, related_name='messages')", "if isinstance(start, Message): after_first = ~models.Q(session=start.session) | models.Q(idx__gte=start.idx) after_first =", "= ~models.Q(session=end.session) | models.Q(idx__lte=end.idx) before_last = models.Q(time__lte=end.time) & before_last else:", "def text(self): return self.message @property def user_name(self): return self.participant.name @property", "__unicode__(self): if self.description: return \"%s/%s (%d): %s\" % (self.schema_id, self.name,", "Session(models.Model): set = models.ForeignKey(DataSet) started = models.DateTimeField() ended = models.DateTimeField()", "models # Create your models here. 
class Schema(models.Model): name =", "user = models.ForeignKey(User) task_id = models.PositiveIntegerField() intensity = models.FloatField() flag", "~models.Q(session=start.session) | models.Q(idx__gte=start.idx) after_first = models.Q(time__gte=start.time) & after_first else: after_first", "models.Q(idx__gte=start.idx) after_first = models.Q(time__gte=start.time) & after_first else: after_first = models.Q(time__gte=start)", "Meta: abstract = True code = models.ForeignKey(Code) message = models.ForeignKey(Message)", "% (self.schema_id, self.name, self.id, self.description) else: return \"%s/%s (%d)\" %", "models.CharField(max_length=100) created = models.DateTimeField() class Session(models.Model): set = models.ForeignKey(DataSet) started", "= models.DateTimeField() class Session(models.Model): set = models.ForeignKey(DataSet) started = models.DateTimeField()", "models.Q(time__gte=start.time) & after_first else: after_first = models.Q(time__gte=start) if isinstance(end, Message):", "@property def user_name(self): return self.participant.name @property def created_at(self): return self.time", "after_first = models.Q(time__gte=start) if isinstance(end, Message): before_last = ~models.Q(session=end.session) |", "return self.time class User(models.Model): name = models.CharField(max_length=100) full_name = models.CharField(max_length=250)", "self.id, self.description) else: return \"%s/%s (%d)\" % (self.schema_id, self.name, self.id)", "ordering of messages, meaning that you won't get messages at", "= models.TextField() class Code(models.Model): name = models.CharField(max_length=200) description = models.TextField()", "= models.ManyToManyField(Code, through='CodeInstance') @classmethod def get_between(cls, start, end): \"\"\" Get", "DataSet(models.Model): name = models.CharField(max_length=100) created = models.DateTimeField() class Session(models.Model): set", "(self.schema_id, self.name, self.id, self.description) else: return \"%s/%s (%d)\" % (self.schema_id,", 
"get_between(cls, start, end): \"\"\" Get messages that are inclusively between", "% (self.id, str(self.started), str(self.ended)) class Participant(models.Model): name = models.CharField(max_length=100) description", "django.db import models # Create your models here. class Schema(models.Model):", "after_first = models.Q(time__gte=start.time) & after_first else: after_first = models.Q(time__gte=start) if", "class Meta: abstract = True code = models.ForeignKey(Code) message =", "= models.ForeignKey(Code) message = models.ForeignKey(Message) added = models.DateTimeField() class CodeInstance(AbstractCodeInstance):", "= models.CharField(max_length=100) full_name = models.CharField(max_length=250) email = models.CharField(max_length=250) def __unicode__(self):", "True code = models.ForeignKey(Code) message = models.ForeignKey(Message) added = models.DateTimeField()", "Message(models.Model): session = models.ForeignKey(Session) idx = models.IntegerField() time = models.DateTimeField()", "models.CharField(max_length=100) full_name = models.CharField(max_length=250) email = models.CharField(max_length=250) def __unicode__(self): return", "before_last = ~models.Q(session=end.session) | models.Q(idx__lte=end.idx) before_last = models.Q(time__lte=end.time) & before_last", "Message): after_first = ~models.Q(session=start.session) | models.Q(idx__gte=start.idx) after_first = models.Q(time__gte=start.time) &", "= models.Q(time__lte=end) return cls.objects.filter(after_first, before_last) @property def text(self): return self.message", "models.CharField(max_length=200) description = models.TextField() active_instances = models.PositiveIntegerField(default=0) schema = models.ForeignKey(Schema,", "code_type = models.IntegerField(default=0) def __unicode__(self): if self.description: return \"%s/%s (%d):", "models.DateTimeField() def __unicode__(self): return \"%d (%s - %s)\" % (self.id,", "= models.Q(time__gte=start.time) & after_first else: after_first = models.Q(time__gte=start) 
if isinstance(end,", "& after_first else: after_first = models.Q(time__gte=start) if isinstance(end, Message): before_last", "class AbstractCodeInstance(models.Model): class Meta: abstract = True code = models.ForeignKey(Code)", "models.ForeignKey(Code) message = models.ForeignKey(Message) added = models.DateTimeField() class CodeInstance(AbstractCodeInstance): user", "class Message(models.Model): session = models.ForeignKey(Session) idx = models.IntegerField() time =", "before_last) @property def text(self): return self.message @property def user_name(self): return", "models.TextField() class Code(models.Model): name = models.CharField(max_length=200) description = models.TextField() active_instances", "if isinstance(end, Message): before_last = ~models.Q(session=end.session) | models.Q(idx__lte=end.idx) before_last =", "set = models.ForeignKey(DataSet) started = models.DateTimeField() ended = models.DateTimeField() def", "your models here. class Schema(models.Model): name = models.CharField(max_length=200) description =", "import models # Create your models here. 
class Schema(models.Model): name", "= models.IntegerField() participant = models.ForeignKey(Participant, related_name='messages') message = models.TextField() codes", "= models.DateTimeField() ended = models.DateTimeField() def __unicode__(self): return \"%d (%s", "= models.TextField() def __unicode__(self): return self.name class Message(models.Model): session =", "return \"%d (%s - %s)\" % (self.id, str(self.started), str(self.ended)) class", "= models.IntegerField() time = models.DateTimeField() type = models.IntegerField() participant =", "= models.Q(time__lte=end.time) & before_last else: before_last = models.Q(time__lte=end) return cls.objects.filter(after_first,", "def __unicode__(self): return self.name class AbstractCodeInstance(models.Model): class Meta: abstract =", "\"%s/%s (%d)\" % (self.schema_id, self.name, self.id) class DataSet(models.Model): name =", "Get messages that are inclusively between the two messages, or", "= models.TextField() codes = models.ManyToManyField(Code, through='CodeInstance') @classmethod def get_between(cls, start,", "the last message, for example. 
\"\"\" if isinstance(start, Message): after_first", "models.Q(idx__lte=end.idx) before_last = models.Q(time__lte=end.time) & before_last else: before_last = models.Q(time__lte=end)", "| models.Q(idx__gte=start.idx) after_first = models.Q(time__gte=start.time) & after_first else: after_first =", "self.name, self.id, self.description) else: return \"%s/%s (%d)\" % (self.schema_id, self.name,", "= models.DateTimeField() class CodeInstance(AbstractCodeInstance): user = models.ForeignKey(User) task_id = models.PositiveIntegerField()", "models.DateTimeField() class Session(models.Model): set = models.ForeignKey(DataSet) started = models.DateTimeField() ended", "= models.PositiveIntegerField(default=0) schema = models.ForeignKey(Schema, related_name=\"codes\") code_type = models.IntegerField(default=0) def", "def created_at(self): return self.time class User(models.Model): name = models.CharField(max_length=100) full_name", "that you won't get messages at the same time but", "into account the exact ordering of messages, meaning that you", "after_first else: after_first = models.Q(time__gte=start) if isinstance(end, Message): before_last =", "message = models.ForeignKey(Message) added = models.DateTimeField() class CodeInstance(AbstractCodeInstance): user =", "the exact ordering of messages, meaning that you won't get", "= models.ForeignKey(Session) idx = models.IntegerField() time = models.DateTimeField() type =", "messages at the same time but after the last message,", "= models.ForeignKey(Participant, related_name='messages') message = models.TextField() codes = models.ManyToManyField(Code, through='CodeInstance')", "are inclusively between the two messages, or two dates. 
Takes", "@property def text(self): return self.message @property def user_name(self): return self.participant.name", "name = models.CharField(max_length=100) description = models.TextField() def __unicode__(self): return self.name", "@classmethod def get_between(cls, start, end): \"\"\" Get messages that are", "self.description) else: return \"%s/%s (%d)\" % (self.schema_id, self.name, self.id) class", "description = models.TextField() class Code(models.Model): name = models.CharField(max_length=200) description =", "models.CharField(max_length=250) def __unicode__(self): return self.name class AbstractCodeInstance(models.Model): class Meta: abstract", "messages, or two dates. Takes into account the exact ordering", "self.name class Message(models.Model): session = models.ForeignKey(Session) idx = models.IntegerField() time", "\"%s/%s (%d): %s\" % (self.schema_id, self.name, self.id, self.description) else: return", "name = models.CharField(max_length=100) created = models.DateTimeField() class Session(models.Model): set =", "related_name='messages') message = models.TextField() codes = models.ManyToManyField(Code, through='CodeInstance') @classmethod def", "(%d)\" % (self.schema_id, self.name, self.id) class DataSet(models.Model): name = models.CharField(max_length=100)", "= models.ForeignKey(Message) added = models.DateTimeField() class CodeInstance(AbstractCodeInstance): user = models.ForeignKey(User)", "%s)\" % (self.id, str(self.started), str(self.ended)) class Participant(models.Model): name = models.CharField(max_length=100)", "time but after the last message, for example. 
\"\"\" if", "through='CodeInstance') @classmethod def get_between(cls, start, end): \"\"\" Get messages that", "return self.message @property def user_name(self): return self.participant.name @property def created_at(self):", "type = models.IntegerField() participant = models.ForeignKey(Participant, related_name='messages') message = models.TextField()", "return cls.objects.filter(after_first, before_last) @property def text(self): return self.message @property def", "started = models.DateTimeField() ended = models.DateTimeField() def __unicode__(self): return \"%d", "of messages, meaning that you won't get messages at the", "(self.id, str(self.started), str(self.ended)) class Participant(models.Model): name = models.CharField(max_length=100) description =", "or two dates. Takes into account the exact ordering of", "(%d): %s\" % (self.schema_id, self.name, self.id, self.description) else: return \"%s/%s", "class Session(models.Model): set = models.ForeignKey(DataSet) started = models.DateTimeField() ended =", "models.TextField() def __unicode__(self): return self.name class Message(models.Model): session = models.ForeignKey(Session)", "name = models.CharField(max_length=200) description = models.TextField() active_instances = models.PositiveIntegerField(default=0) schema", "the two messages, or two dates. 
Takes into account the", "return self.participant.name @property def created_at(self): return self.time class User(models.Model): name", "Participant(models.Model): name = models.CharField(max_length=100) description = models.TextField() def __unicode__(self): return", "@property def created_at(self): return self.time class User(models.Model): name = models.CharField(max_length=100)", "= models.ForeignKey(User) task_id = models.PositiveIntegerField() intensity = models.FloatField() flag =", "def get_between(cls, start, end): \"\"\" Get messages that are inclusively", "models.ForeignKey(Message) added = models.DateTimeField() class CodeInstance(AbstractCodeInstance): user = models.ForeignKey(User) task_id", "related_name=\"codes\") code_type = models.IntegerField(default=0) def __unicode__(self): if self.description: return \"%s/%s", "models.ForeignKey(Session) idx = models.IntegerField() time = models.DateTimeField() type = models.IntegerField()", "models.IntegerField() time = models.DateTimeField() type = models.IntegerField() participant = models.ForeignKey(Participant,", "dates. Takes into account the exact ordering of messages, meaning", "before_last = models.Q(time__lte=end) return cls.objects.filter(after_first, before_last) @property def text(self): return", "two dates. Takes into account the exact ordering of messages,", "ended = models.DateTimeField() def __unicode__(self): return \"%d (%s - %s)\"", "won't get messages at the same time but after the", "between the two messages, or two dates. 
Takes into account", "__unicode__(self): return self.name class AbstractCodeInstance(models.Model): class Meta: abstract = True", "if self.description: return \"%s/%s (%d): %s\" % (self.schema_id, self.name, self.id,", "class Schema(models.Model): name = models.CharField(max_length=200) description = models.TextField() class Code(models.Model):", "at the same time but after the last message, for", "models.ManyToManyField(Code, through='CodeInstance') @classmethod def get_between(cls, start, end): \"\"\" Get messages", "= models.ForeignKey(DataSet) started = models.DateTimeField() ended = models.DateTimeField() def __unicode__(self):", "str(self.started), str(self.ended)) class Participant(models.Model): name = models.CharField(max_length=100) description = models.TextField()", "account the exact ordering of messages, meaning that you won't", "that are inclusively between the two messages, or two dates.", "created = models.DateTimeField() class Session(models.Model): set = models.ForeignKey(DataSet) started =", "models.IntegerField(default=0) def __unicode__(self): if self.description: return \"%s/%s (%d): %s\" %", "= models.Q(time__gte=start) if isinstance(end, Message): before_last = ~models.Q(session=end.session) | models.Q(idx__lte=end.idx)", "models.DateTimeField() ended = models.DateTimeField() def __unicode__(self): return \"%d (%s -", "= True code = models.ForeignKey(Code) message = models.ForeignKey(Message) added =", "(self.schema_id, self.name, self.id) class DataSet(models.Model): name = models.CharField(max_length=100) created =", "__unicode__(self): return self.name class Message(models.Model): session = models.ForeignKey(Session) idx =", "participant = models.ForeignKey(Participant, related_name='messages') message = models.TextField() codes = models.ManyToManyField(Code,", "self.message @property def user_name(self): return self.participant.name @property def created_at(self): return", "Message): before_last = ~models.Q(session=end.session) | 
models.Q(idx__lte=end.idx) before_last = models.Q(time__lte=end.time) &", "# Create your models here. class Schema(models.Model): name = models.CharField(max_length=200)", "self.id) class DataSet(models.Model): name = models.CharField(max_length=100) created = models.DateTimeField() class", "return self.name class Message(models.Model): session = models.ForeignKey(Session) idx = models.IntegerField()", "here. class Schema(models.Model): name = models.CharField(max_length=200) description = models.TextField() class", "active_instances = models.PositiveIntegerField(default=0) schema = models.ForeignKey(Schema, related_name=\"codes\") code_type = models.IntegerField(default=0)", "before_last = models.Q(time__lte=end.time) & before_last else: before_last = models.Q(time__lte=end) return", "user_name(self): return self.participant.name @property def created_at(self): return self.time class User(models.Model):", "= models.DateTimeField() type = models.IntegerField() participant = models.ForeignKey(Participant, related_name='messages') message", "exact ordering of messages, meaning that you won't get messages", "code = models.ForeignKey(Code) message = models.ForeignKey(Message) added = models.DateTimeField() class", "before_last else: before_last = models.Q(time__lte=end) return cls.objects.filter(after_first, before_last) @property def", "self.name class AbstractCodeInstance(models.Model): class Meta: abstract = True code =", "= models.CharField(max_length=100) description = models.TextField() def __unicode__(self): return self.name class", "self.description: return \"%s/%s (%d): %s\" % (self.schema_id, self.name, self.id, self.description)", "(%s - %s)\" % (self.id, str(self.started), str(self.ended)) class Participant(models.Model): name", "= models.CharField(max_length=200) description = models.TextField() active_instances = models.PositiveIntegerField(default=0) schema =", "class User(models.Model): name = models.CharField(max_length=100) full_name = 
models.CharField(max_length=250) email =", "isinstance(start, Message): after_first = ~models.Q(session=start.session) | models.Q(idx__gte=start.idx) after_first = models.Q(time__gte=start.time)", "abstract = True code = models.ForeignKey(Code) message = models.ForeignKey(Message) added", "class DataSet(models.Model): name = models.CharField(max_length=100) created = models.DateTimeField() class Session(models.Model):", "you won't get messages at the same time but after", "models.DateTimeField() class CodeInstance(AbstractCodeInstance): user = models.ForeignKey(User) task_id = models.PositiveIntegerField() intensity", "class CodeInstance(AbstractCodeInstance): user = models.ForeignKey(User) task_id = models.PositiveIntegerField() intensity =", "__unicode__(self): return \"%d (%s - %s)\" % (self.id, str(self.started), str(self.ended))", "isinstance(end, Message): before_last = ~models.Q(session=end.session) | models.Q(idx__lte=end.idx) before_last = models.Q(time__lte=end.time)", "\"\"\" Get messages that are inclusively between the two messages,", "two messages, or two dates. Takes into account the exact", "models here. class Schema(models.Model): name = models.CharField(max_length=200) description = models.TextField()", "session = models.ForeignKey(Session) idx = models.IntegerField() time = models.DateTimeField() type", "get messages at the same time but after the last", "\"%d (%s - %s)\" % (self.id, str(self.started), str(self.ended)) class Participant(models.Model):", "created_at(self): return self.time class User(models.Model): name = models.CharField(max_length=100) full_name =", "Code(models.Model): name = models.CharField(max_length=200) description = models.TextField() active_instances = models.PositiveIntegerField(default=0)", "schema = models.ForeignKey(Schema, related_name=\"codes\") code_type = models.IntegerField(default=0) def __unicode__(self): if" ]
[ "parsed = urlparse.urlparse(purchase.invoice) _, _, purchase, _ = parsed.path.split('/') path", "class Container(object): def __init__(self, **kw): self.__dict__.update(kw) def do_purchase(products, emailaddress): params", "url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) else: url", "'ssh', 'org', 'emailaddress') def test_swish_payment(nodes, ssh, mailssh, org, emailaddress): #py.test.skip('Skip", "2.0 (the \"License\"); # you may not use this file", "'')) data = {'phone': '1231181189'} req = urllib2.Request(url, json.dumps(data), {'Content-Type':", "= client.open_sftp() sftp.put(str(pgfile), 'incoming/%s' % dest, confirm=False) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped',", "we can simply replace it in order to make partial", "'nodes', 'ssh', 'org', 'emailaddress') def test_swish_payment(nodes, ssh, mailssh, org, emailaddress):", "def check_mail(client, mailssh, purchase, mailtype): client.run('sendmail -qf') message, = mailssh.find_and_delete_mail(None,", "Copyright 2019 Open End AB # # Licensed under the", "urllib.request.Request(url, json.dumps(data).encode('ascii'), {'Content-Type': 'application/json'}) response = json.load(urllib.request.urlopen(req)) else: parsed =", "with check_mails(client, mailssh, purchase): print(purchase.invoice) if PYT3: parsed = urllib.parse.urlparse(purchase.invoice)", "emailaddress, tmpdir): purchase = do_purchase([org.product], emailaddress) with ssh() as client:", "contextlib import json import os import py import subprocess import", "paid assert '111,11' in msg # amount remaining upload_pg(tmpdir, ssh,", "pgdata3.replace('66666', '11111') # final 111.11 SEK upload_pg(tmpdir, ssh, partial_payment1) msg,", "= urlparse.urlparse(purchase.invoice) _, _, purchase, _ = parsed.path.split('/') path =", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", ". 
import support here = os.path.dirname(__file__) class Container(object): def __init__(self,", "1]): cmd = 'python /root/accounting/members/paymentgen.py %s %s %s' % (", "emailaddress} ] } if PYT3: req = urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'), json.dumps(params).encode('ascii'),", "'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def test_partial_plusgiro_payment(ssh, mailssh, org,", "pgdata1.replace('66666', '22222') # pay 222.22 SEK partial_payment2 = pgdata2.replace('66666', '33333')", "PYT3: req = urllib.request.Request(url) response = json.load(urllib.request.urlopen(req)) else: req =", "json import os import py import subprocess import time import", "_, purchase, _ = parsed.path.split('/') path = '/providers/swish/charge/%s/%s' % (org.swish_provider,", "os import py import subprocess import time import uuid from", "support here = os.path.dirname(__file__) class Container(object): def __init__(self, **kw): self.__dict__.update(kw)", "json.load(urllib.request.urlopen(req)) else: parsed = urlparse.urlparse(purchase.invoice) _, _, purchase, _ =", "333.33 SEK final_payment = pgdata3.replace(b'66666', b'11111') # final 111.11 SEK", "with ssh() as client: with check_mails(client, mailssh, purchase): pgdata1 =", "check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '333,33' in msg # amount", "'data': [ {'items': [{'product': product} for product in products], 'buyerName':", "path, '', '', '')) for _ in range(20): if PYT3:", "pay 333.33 SEK final_payment = pgdata3.replace(b'66666', b'11111') # final 111.11", "to make partial payments. if PYT3: partial_payment1 = pgdata1.replace(b'66666', b'22222')", "use this file except in compliance with the License. 
#", "'1231181189'} req = urllib2.Request(url, json.dumps(data), {'Content-Type': 'application/json'}) response = json.load(urllib2.urlopen(req))", "org, emailaddress, tmpdir): purchase = do_purchase([org.product], emailaddress) with ssh() as", "'444,44' in msg # amount remaining upload_pg(tmpdir, ssh, partial_payment2) msg,", "= gen_pg(client, org) # The sum is 66666 (öre). It", "import urllib.request import urllib.parse else: PYT3 = False import urllib2", "upload_pg(tmpdir, ssh, partial_payment1) msg, headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation')", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "response['id']) if PYT3: url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '',", "response = json.load(urllib2.urlopen(req)) print(response) if response['status'] == 'PAID': break time.sleep(1)", "assert response['status'] == 'CREATED' path = '/providers/swish/poll/%s/%s' % (org.swish_provider, response['id'])", "License. 
# You may obtain a copy of the License", "% (org.swish_provider, response['id']) if PYT3: url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path,", "__init__(self, **kw): self.__dict__.update(kw) def do_purchase(products, emailaddress): params = { 'data':", "'/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '',", "pgdata1.replace(b'66666', b'22222') # pay 222.22 SEK partial_payment2 = pgdata2.replace(b'66666', b'33333')", "org) pgdata2 = gen_pg(client, org) pgdata3 = gen_pg(client, org) #", "under the License is distributed on an \"AS IS\" BASIS,", "partial_payment1) msg, headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '222,22'", "License for the specific language governing permissions and # limitations", "path, '', '', '')) else: url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path,", "'', '')) data = {'phone': '1231181189'} req = urllib2.Request(url, json.dumps(data),", "111.11 SEK else: partial_payment1 = pgdata1.replace('66666', '22222') # pay 222.22", "%s %s %s' % ( org.id, id_args[0], id_args[1]) id_args[0] +=", "+ cmd) return stdout.read() def upload_pg(tmpdir, ssh, pgdata): pgfile =", "check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '222,22' in msg # amount", "'/providers/swish/poll/%s/%s' % (org.swish_provider, response['id']) if PYT3: url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc,", "else: req = urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'), json.dumps(params), {'Content-Type': 'application/json'}) data =", "final_payment) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'nodes', 'ssh', 'org', 'emailaddress') def", "as client: with check_mails(client, mailssh, purchase): print(purchase.invoice) if PYT3: parsed", "amount remaining upload_pg(tmpdir, ssh, final_payment) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 
'mailssh', 'nodes',", "import py import subprocess import time import uuid from .", "org, emailaddress): #py.test.skip('Skip swish tests until certificates work') purchase =", "until certificates work') purchase = do_purchase([org.product], emailaddress) with ssh() as", "in compliance with the License. # You may obtain a", "purchase.buyerEmail) msg, headers = mailssh.parse(message) assert headers['X-OE-MailType'] == [mailtype] assert", "'bootstrapped', 'mailssh', 'nodes', 'ssh', 'org', 'emailaddress') def test_swish_payment(nodes, ssh, mailssh,", "data = json.load(urllib2.urlopen(req)) return Container(id=data['purchase'], invoice=data['invoiceUrl'], buyerEmail=emailaddress) def check_mail(client, mailssh,", "software # distributed under the License is distributed on an", "upload_pg(tmpdir, ssh, partial_payment2) msg, headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation')", "client: with check_mails(client, mailssh, purchase): pgdata = gen_pg(client, org) upload_pg(tmpdir,", "with check_mails(client, mailssh, purchase): pgdata1 = gen_pg(client, org) pgdata2 =", "ssh, partial_payment2) msg, headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert", "do_purchase(products, emailaddress): params = { 'data': [ {'items': [{'product': product}", "'partial-payment-confirmation') assert '222,22' in msg # amount paid assert '444,44'", "{'Content-Type': 'application/json'}) response = json.load(urllib2.urlopen(req)) print(response) assert response['status'] == 'CREATED'", "# pay 333.33 SEK final_payment = pgdata3.replace(b'66666', b'11111') # final", "purchase, 'full-payment-confirmation') def gen_pg(client, org, id_args=[1, 1]): cmd = 'python", "mailssh, purchase): print(purchase.invoice) if PYT3: parsed = urllib.parse.urlparse(purchase.invoice) _, _,", "'emailaddress') def test_partial_plusgiro_payment(ssh, mailssh, org, emailaddress, tmpdir): purchase = do_purchase([org.product],", "'clean_db', 'bootstrapped', 
'mailssh', 'nodes', 'ssh', 'org', 'emailaddress') def test_swish_payment(nodes, ssh,", "id_args[1] += 1000 stdin, stdout, stderr = client.exec_command('PYTHONPATH=/root/accounting ' +", "purchase, mailtype): client.run('sendmail -qf') message, = mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail) msg,", "import contextlib import json import os import py import subprocess", "gen_pg(client, org) upload_pg(tmpdir, ssh, pgdata) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh',", "Container(id=data['purchase'], invoice=data['invoiceUrl'], buyerEmail=emailaddress) def check_mail(client, mailssh, purchase, mailtype): client.run('sendmail -qf')", "@contextlib.contextmanager def check_mails(client, mailssh, purchase): check_mail(client, mailssh, purchase, 'order-confirmation') yield", "purchase): pgdata1 = gen_pg(client, org) pgdata2 = gen_pg(client, org) pgdata3", "json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'), json.dumps(params), {'Content-Type': 'application/json'}) data", "sys if (sys.version_info >=(3, 0)): PYT3 = True import urllib.request", "urllib2 import urlparse import contextlib import json import os import", "= 'python /root/accounting/members/paymentgen.py %s %s %s' % ( org.id, id_args[0],", "'incoming/%s' % dest, confirm=False) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org',", "# -*- coding: utf-8 -*- # Copyright 2019 Open End", "= json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(url) response = json.load(urllib2.urlopen(req)) print(response)", "pgdata): pgfile = tmpdir.join('pgfile') pgfile.write(pgdata) dest = uuid.uuid4() with ssh(username='nordea')", "# pay 222.22 SEK partial_payment2 = pgdata2.replace('66666', '33333') # pay", "urllib2.Request(url) response = json.load(urllib2.urlopen(req)) print(response) if response['status'] == 'PAID': break", "'org', 'emailaddress') def 
test_partial_plusgiro_payment(ssh, mailssh, org, emailaddress, tmpdir): purchase =", "org.id, id_args[0], id_args[1]) id_args[0] += 1 id_args[1] += 1000 stdin,", "json.dumps(data), {'Content-Type': 'application/json'}) response = json.load(urllib2.urlopen(req)) print(response) assert response['status'] ==", "payments. if PYT3: partial_payment1 = pgdata1.replace(b'66666', b'22222') # pay 222.22", "% (org.swish_provider, purchase) url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '',", "== 'CREATED' path = '/providers/swish/poll/%s/%s' % (org.swish_provider, response['id']) if PYT3:", "data = json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'), json.dumps(params), {'Content-Type':", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "% dest, confirm=False) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress')", "if PYT3: parsed = urllib.parse.urlparse(purchase.invoice) _, _, purchase, _ =", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "else: PYT3 = False import urllib2 import urlparse import contextlib", "= { 'data': [ {'items': [{'product': product} for product in", "cmd) return stdout.read() def upload_pg(tmpdir, ssh, pgdata): pgfile = tmpdir.join('pgfile')", "to in writing, software # distributed under the License is", "url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) data =", "2019 Open End AB # # Licensed under the Apache", "# limitations under the License. 
import sys if (sys.version_info >=(3,", "# See the License for the specific language governing permissions", "= '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path,", "= pgdata3.replace('66666', '11111') # final 111.11 SEK upload_pg(tmpdir, ssh, partial_payment1)", "mailtype): client.run('sendmail -qf') message, = mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail) msg, headers", "'', '')) for _ in range(20): if PYT3: req =", "= client.exec_command('PYTHONPATH=/root/accounting ' + cmd) return stdout.read() def upload_pg(tmpdir, ssh,", "= pgdata3.replace(b'66666', b'11111') # final 111.11 SEK else: partial_payment1 =", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "uuid from . import support here = os.path.dirname(__file__) class Container(object):", "client.run('sendmail -qf') message, = mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail) msg, headers =", "mailssh.parse(message) assert headers['X-OE-MailType'] == [mailtype] assert purchase.invoice in msg return", "(öre). It is probably unique in the fake pgfile, #", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "tests until certificates work') purchase = do_purchase([org.product], emailaddress) with ssh()", "headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '333,33' in msg", "[mailtype] assert purchase.invoice in msg return msg, headers @contextlib.contextmanager def", "+= 1000 stdin, stdout, stderr = client.exec_command('PYTHONPATH=/root/accounting ' + cmd)", "with the License. 
# You may obtain a copy of", "# so we can simply replace it in order to", "path, '', '', '')) data = {'phone': '1231181189'} req =", "if PYT3: req = urllib.request.Request(url) response = json.load(urllib.request.urlopen(req)) else: req", "req = urllib.request.Request(url) response = json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(url)", "1000 stdin, stdout, stderr = client.exec_command('PYTHONPATH=/root/accounting ' + cmd) return", "pgdata2.replace(b'66666', b'33333') # pay 333.33 SEK final_payment = pgdata3.replace(b'66666', b'11111')", "import urlparse import contextlib import json import os import py", "simply replace it in order to make partial payments. if", "req = urllib.request.Request(url, json.dumps(data).encode('ascii'), {'Content-Type': 'application/json'}) response = json.load(urllib.request.urlopen(req)) else:", "_ in range(20): if PYT3: req = urllib.request.Request(url) response =", "pgdata) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def test_partial_plusgiro_payment(ssh,", "swish tests until certificates work') purchase = do_purchase([org.product], emailaddress) with", "% ( org.id, id_args[0], id_args[1]) id_args[0] += 1 id_args[1] +=", "ssh() as client: with check_mails(client, mailssh, purchase): pgdata = gen_pg(client,", "= {'phone': '1231181189'} req = urllib.request.Request(url, json.dumps(data).encode('ascii'), {'Content-Type': 'application/json'}) response", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "'/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '',", "ssh, mailssh, org, emailaddress): #py.test.skip('Skip swish tests until certificates work')", "urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'), json.dumps(params), {'Content-Type': 'application/json'}) data = json.load(urllib2.urlopen(req)) return Container(id=data['purchase'],", "distributed under the License is distributed on an \"AS IS\"", "parsed.path.split('/') path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url = urllib.parse.urlunparse((parsed.scheme,", "= urllib.parse.urlparse(purchase.invoice) _, _, purchase, _ = parsed.path.split('/') path =", "purchase) url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) data", "express or implied. # See the License for the specific", "@py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'nodes', 'ssh', 'org', 'emailaddress') def test_swish_payment(nodes,", "except in compliance with the License. # You may obtain", "= check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '333,33' in msg #", "urllib2.Request(url, json.dumps(data), {'Content-Type': 'application/json'}) response = json.load(urllib2.urlopen(req)) print(response) assert response['status']", "'', '')) data = {'phone': '1231181189'} req = urllib.request.Request(url, json.dumps(data).encode('ascii'),", "msg # amount paid assert '444,44' in msg # amount", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "== [mailtype] assert purchase.invoice in msg return msg, headers @contextlib.contextmanager", "not use this file except in compliance with the License.", "time import uuid from . 
import support here = os.path.dirname(__file__)", "333.33 SEK final_payment = pgdata3.replace('66666', '11111') # final 111.11 SEK", "sum is 66666 (öre). It is probably unique in the", "'emailaddress') def test_swish_payment(nodes, ssh, mailssh, org, emailaddress): #py.test.skip('Skip swish tests", "def check_mails(client, mailssh, purchase): check_mail(client, mailssh, purchase, 'order-confirmation') yield check_mail(client,", "mailssh, purchase, 'partial-payment-confirmation') assert '222,22' in msg # amount paid", "= urllib.request.Request(url) response = json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(url) response", "= urllib2.Request(url) response = json.load(urllib2.urlopen(req)) print(response) if response['status'] == 'PAID':", "tmpdir): purchase = do_purchase([org.product], emailaddress) with ssh() as client: with", "writing, software # distributed under the License is distributed on", "parsed = urllib.parse.urlparse(purchase.invoice) _, _, purchase, _ = parsed.path.split('/') path", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "as client: with check_mails(client, mailssh, purchase): pgdata = gen_pg(client, org)", "= mailssh.parse(message) assert headers['X-OE-MailType'] == [mailtype] assert purchase.invoice in msg", "response = json.load(urllib.request.urlopen(req)) else: parsed = urlparse.urlparse(purchase.invoice) _, _, purchase,", "True import urllib.request import urllib.parse else: PYT3 = False import", "'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def test_full_plusgiro_payment(mailssh, ssh, org,", "import urllib.parse else: PYT3 = False import urllib2 import urlparse", "partial_payment1 = pgdata1.replace(b'66666', b'22222') # pay 222.22 SEK partial_payment2 =", "if PYT3: url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', ''))", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) for _", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "b'33333') # pay 333.33 SEK final_payment = pgdata3.replace(b'66666', b'11111') #", "msg # amount remaining upload_pg(tmpdir, ssh, final_payment) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped',", "remaining upload_pg(tmpdir, ssh, partial_payment2) msg, headers = check_mail(client, mailssh, purchase,", "return stdout.read() def upload_pg(tmpdir, ssh, pgdata): pgfile = tmpdir.join('pgfile') pgfile.write(pgdata)", "'ssh', 'org', 'emailaddress') def test_full_plusgiro_payment(mailssh, ssh, org, emailaddress, tmpdir): purchase", "do_purchase([org.product], emailaddress) with ssh() as client: with check_mails(client, mailssh, purchase):", "'application/json'}) response = json.load(urllib.request.urlopen(req)) else: parsed = urlparse.urlparse(purchase.invoice) _, _,", "= urllib2.Request(url, json.dumps(data), {'Content-Type': 'application/json'}) response = json.load(urllib2.urlopen(req)) print(response) assert", "path = '/providers/swish/poll/%s/%s' % (org.swish_provider, response['id']) if PYT3: url =", "Container(object): def __init__(self, **kw): self.__dict__.update(kw) def do_purchase(products, emailaddress): params =", "def do_purchase(products, emailaddress): params = { 'data': [ {'items': [{'product':", "partial_payment2 = pgdata2.replace(b'66666', b'33333') # pay 333.33 SEK final_payment =", "= json.load(urllib2.urlopen(req)) print(response) assert response['status'] == 'CREATED' path = '/providers/swish/poll/%s/%s'", "client: with check_mails(client, mailssh, purchase): print(purchase.invoice) if PYT3: parsed =", "response = json.load(urllib2.urlopen(req)) print(response) assert response['status'] == 'CREATED' path =", "PYT3 = True import urllib.request import urllib.parse else: PYT3 =", "self.__dict__.update(kw) def do_purchase(products, emailaddress): params = { 'data': [ {'items':", 
"'order-confirmation') yield check_mail(client, mailssh, purchase, 'full-payment-confirmation') def gen_pg(client, org, id_args=[1,", "import subprocess import time import uuid from . import support", "' + cmd) return stdout.read() def upload_pg(tmpdir, ssh, pgdata): pgfile", "End AB # # Licensed under the Apache License, Version", "import sys if (sys.version_info >=(3, 0)): PYT3 = True import", "governing permissions and # limitations under the License. import sys", "urlparse import contextlib import json import os import py import", "{'items': [{'product': product} for product in products], 'buyerName': '<NAME>', 'buyerEmail':", "= gen_pg(client, org) pgdata3 = gen_pg(client, org) # The sum", "remaining upload_pg(tmpdir, ssh, final_payment) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'nodes', 'ssh',", "purchase.invoice in msg return msg, headers @contextlib.contextmanager def check_mails(client, mailssh,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "else: partial_payment1 = pgdata1.replace('66666', '22222') # pay 222.22 SEK partial_payment2", "pgdata1 = gen_pg(client, org) pgdata2 = gen_pg(client, org) pgdata3 =", "purchase, 'partial-payment-confirmation') assert '333,33' in msg # amount paid assert", "parsed.netloc, path, '', '', '')) else: url = urlparse.urlunparse((parsed.scheme, parsed.netloc,", "the License is distributed on an \"AS IS\" BASIS, #", "assert '444,44' in msg # amount remaining upload_pg(tmpdir, ssh, partial_payment2)", "org) # The sum is 66666 (öre). 
It is probably", "upload_pg(tmpdir, ssh, pgdata): pgfile = tmpdir.join('pgfile') pgfile.write(pgdata) dest = uuid.uuid4()", "work') purchase = do_purchase([org.product], emailaddress) with ssh() as client: with", "unique in the fake pgfile, # so we can simply", "check_mails(client, mailssh, purchase): print(purchase.invoice) if PYT3: parsed = urllib.parse.urlparse(purchase.invoice) _,", "urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) data = {'phone': '1231181189'}", "data = {'phone': '1231181189'} req = urllib2.Request(url, json.dumps(data), {'Content-Type': 'application/json'})", "probably unique in the fake pgfile, # so we can", "json.load(urllib2.urlopen(req)) return Container(id=data['purchase'], invoice=data['invoiceUrl'], buyerEmail=emailaddress) def check_mail(client, mailssh, purchase, mailtype):", "mailssh, org, emailaddress, tmpdir): purchase = do_purchase([org.product], emailaddress) with ssh()", "assert '333,33' in msg # amount paid assert '111,11' in", "purchase): check_mail(client, mailssh, purchase, 'order-confirmation') yield check_mail(client, mailssh, purchase, 'full-payment-confirmation')", "SEK final_payment = pgdata3.replace(b'66666', b'11111') # final 111.11 SEK else:", "_ = parsed.path.split('/') path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url", "is probably unique in the fake pgfile, # so we", "222.22 SEK partial_payment2 = pgdata2.replace('66666', '33333') # pay 333.33 SEK", "= urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'), json.dumps(params), {'Content-Type': 'application/json'}) data = json.load(urllib2.urlopen(req)) return", "as client: with check_mails(client, mailssh, purchase): pgdata1 = gen_pg(client, org)", "'111,11' in msg # amount remaining upload_pg(tmpdir, ssh, final_payment) @py.test.mark.usefixtures('cluster',", "urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) for _ in range(20):", "os.path.dirname(__file__) class Container(object): def 
__init__(self, **kw): self.__dict__.update(kw) def do_purchase(products, emailaddress):", "gen_pg(client, org) # The sum is 66666 (öre). It is", "certificates work') purchase = do_purchase([org.product], emailaddress) with ssh() as client:", "purchase): print(purchase.invoice) if PYT3: parsed = urllib.parse.urlparse(purchase.invoice) _, _, purchase,", "= {'phone': '1231181189'} req = urllib2.Request(url, json.dumps(data), {'Content-Type': 'application/json'}) response", "final 111.11 SEK upload_pg(tmpdir, ssh, partial_payment1) msg, headers = check_mail(client,", "**kw): self.__dict__.update(kw) def do_purchase(products, emailaddress): params = { 'data': [", "law or agreed to in writing, software # distributed under", "'org', 'emailaddress') def test_swish_payment(nodes, ssh, mailssh, org, emailaddress): #py.test.skip('Skip swish", "pgfile = tmpdir.join('pgfile') pgfile.write(pgdata) dest = uuid.uuid4() with ssh(username='nordea') as", "] } if PYT3: req = urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'), json.dumps(params).encode('ascii'), {'Content-Type':", "as client: sftp = client.open_sftp() sftp.put(str(pgfile), 'incoming/%s' % dest, confirm=False)", "mailssh, purchase, 'partial-payment-confirmation') assert '333,33' in msg # amount paid", "ssh, pgdata): pgfile = tmpdir.join('pgfile') pgfile.write(pgdata) dest = uuid.uuid4() with", "'CREATED' path = '/providers/swish/poll/%s/%s' % (org.swish_provider, response['id']) if PYT3: url", "pgdata2.replace('66666', '33333') # pay 333.33 SEK final_payment = pgdata3.replace('66666', '11111')", "@py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def test_full_plusgiro_payment(mailssh, ssh,", "if PYT3: req = urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'), json.dumps(params).encode('ascii'), {'Content-Type': 'application/json'}) data", "confirm=False) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 
'mailssh', 'ssh', 'org', 'emailaddress') def test_full_plusgiro_payment(mailssh,", "in products], 'buyerName': '<NAME>', 'buyerEmail': emailaddress} ] } if PYT3:", "# amount remaining upload_pg(tmpdir, ssh, partial_payment2) msg, headers = check_mail(client,", "import os import py import subprocess import time import uuid", "'mailssh', 'ssh', 'org', 'emailaddress') def test_partial_plusgiro_payment(ssh, mailssh, org, emailaddress, tmpdir):", "may obtain a copy of the License at # #", "[{'product': product} for product in products], 'buyerName': '<NAME>', 'buyerEmail': emailaddress}", "-qf') message, = mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail) msg, headers = mailssh.parse(message)", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "pgdata3.replace(b'66666', b'11111') # final 111.11 SEK else: partial_payment1 = pgdata1.replace('66666',", "json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(url) response = json.load(urllib2.urlopen(req)) print(response) if", "= pgdata1.replace('66666', '22222') # pay 222.22 SEK partial_payment2 = pgdata2.replace('66666',", "stderr = client.exec_command('PYTHONPATH=/root/accounting ' + cmd) return stdout.read() def upload_pg(tmpdir,", "# final 111.11 SEK else: partial_payment1 = pgdata1.replace('66666', '22222') #", "# pay 222.22 SEK partial_payment2 = pgdata2.replace(b'66666', b'33333') # pay", "def upload_pg(tmpdir, ssh, pgdata): pgfile = tmpdir.join('pgfile') pgfile.write(pgdata) dest =", "may not use this file except in compliance with the", "check_mail(client, mailssh, purchase, 'order-confirmation') yield check_mail(client, mailssh, purchase, 'full-payment-confirmation') def", "The sum is 66666 (öre). It is probably unique in", "is 66666 (öre). 
It is probably unique in the fake", "else: url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) for", "check_mails(client, mailssh, purchase): pgdata = gen_pg(client, org) upload_pg(tmpdir, ssh, pgdata)", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "in msg # amount paid assert '111,11' in msg #", "it in order to make partial payments. if PYT3: partial_payment1", "this file except in compliance with the License. # You", "here = os.path.dirname(__file__) class Container(object): def __init__(self, **kw): self.__dict__.update(kw) def", "(org.swish_provider, purchase) url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', ''))", "/root/accounting/members/paymentgen.py %s %s %s' % ( org.id, id_args[0], id_args[1]) id_args[0]", "in msg # amount remaining upload_pg(tmpdir, ssh, partial_payment2) msg, headers", "= json.load(urllib.request.urlopen(req)) else: parsed = urlparse.urlparse(purchase.invoice) _, _, purchase, _", "py import subprocess import time import uuid from . import", "order to make partial payments. 
if PYT3: partial_payment1 = pgdata1.replace(b'66666',", "b'22222') # pay 222.22 SEK partial_payment2 = pgdata2.replace(b'66666', b'33333') #", "'22222') # pay 222.22 SEK partial_payment2 = pgdata2.replace('66666', '33333') #", "purchase): pgdata = gen_pg(client, org) upload_pg(tmpdir, ssh, pgdata) @py.test.mark.usefixtures('cluster', 'clean_db',", "'mailssh', 'ssh', 'org', 'emailaddress') def test_full_plusgiro_payment(mailssh, ssh, org, emailaddress, tmpdir):", "sftp.put(str(pgfile), 'incoming/%s' % dest, confirm=False) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh',", "in msg # amount remaining upload_pg(tmpdir, ssh, final_payment) @py.test.mark.usefixtures('cluster', 'clean_db',", "else: parsed = urlparse.urlparse(purchase.invoice) _, _, purchase, _ = parsed.path.split('/')", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "req = urllib2.Request(url) response = json.load(urllib2.urlopen(req)) print(response) if response['status'] ==", "in the fake pgfile, # so we can simply replace", "'11111') # final 111.11 SEK upload_pg(tmpdir, ssh, partial_payment1) msg, headers", "# # Licensed under the Apache License, Version 2.0 (the", "test_partial_plusgiro_payment(ssh, mailssh, org, emailaddress, tmpdir): purchase = do_purchase([org.product], emailaddress) with", "sftp = client.open_sftp() sftp.put(str(pgfile), 'incoming/%s' % dest, confirm=False) @py.test.mark.usefixtures('cluster', 'clean_db',", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "'', '', '')) data = {'phone': '1231181189'} req = urllib.request.Request(url,", "<gh_stars>0 # -*- coding: utf-8 -*- # Copyright 2019 Open", "# Copyright 2019 Open End AB # # Licensed under", ">=(3, 0)): PYT3 = True import urllib.request import urllib.parse else:", "ssh(username='nordea') as client: sftp = client.open_sftp() sftp.put(str(pgfile), 'incoming/%s' % dest,", "'')) data = {'phone': '1231181189'} req = urllib.request.Request(url, json.dumps(data).encode('ascii'), {'Content-Type':", "111.11 SEK upload_pg(tmpdir, ssh, partial_payment1) msg, headers = check_mail(client, mailssh,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "msg, headers @contextlib.contextmanager def check_mails(client, mailssh, purchase): check_mail(client, mailssh, purchase,", "check_mail(client, mailssh, purchase, 'full-payment-confirmation') def gen_pg(client, org, id_args=[1, 1]): cmd", "# amount remaining upload_pg(tmpdir, ssh, final_payment) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh',", "check_mail(client, mailssh, purchase, mailtype): client.run('sendmail -qf') message, = mailssh.find_and_delete_mail(None, 'TO',", "with ssh() as client: with check_mails(client, mailssh, purchase): pgdata =", "utf-8 -*- # Copyright 2019 Open End AB # #", "emailaddress): #py.test.skip('Skip swish tests until certificates work') purchase = do_purchase([org.product],", "return msg, headers @contextlib.contextmanager def check_mails(client, mailssh, purchase): check_mail(client, mailssh,", "replace it in order to make partial payments. 
if PYT3:", "import json import os import py import subprocess import time", "client: with check_mails(client, mailssh, purchase): pgdata1 = gen_pg(client, org) pgdata2", "= json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'), json.dumps(params), {'Content-Type': 'application/json'})", "'buyerEmail': emailaddress} ] } if PYT3: req = urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'),", "purchase, 'order-confirmation') yield check_mail(client, mailssh, purchase, 'full-payment-confirmation') def gen_pg(client, org,", "'', '', '')) for _ in range(20): if PYT3: req", "%s' % ( org.id, id_args[0], id_args[1]) id_args[0] += 1 id_args[1]", "language governing permissions and # limitations under the License. import", "urlparse.urlparse(purchase.invoice) _, _, purchase, _ = parsed.path.split('/') path = '/providers/swish/charge/%s/%s'", "= urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'), json.dumps(params).encode('ascii'), {'Content-Type': 'application/json'}) data = json.load(urllib.request.urlopen(req)) else:", "response['status'] == 'CREATED' path = '/providers/swish/poll/%s/%s' % (org.swish_provider, response['id']) if", "'application/json'}) data = json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'), json.dumps(params),", "{'Content-Type': 'application/json'}) data = json.load(urllib2.urlopen(req)) return Container(id=data['purchase'], invoice=data['invoiceUrl'], buyerEmail=emailaddress) def", "'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def test_full_plusgiro_payment(mailssh, ssh, org, emailaddress,", "amount remaining upload_pg(tmpdir, ssh, partial_payment2) msg, headers = check_mail(client, mailssh,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "PYT3 = False import urllib2 import urlparse import contextlib import", "final_payment = 
pgdata3.replace(b'66666', b'11111') # final 111.11 SEK else: partial_payment1", "cmd = 'python /root/accounting/members/paymentgen.py %s %s %s' % ( org.id,", "headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '222,22' in msg", "parsed.netloc, path, '', '', '')) data = {'phone': '1231181189'} req", "'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def test_partial_plusgiro_payment(ssh, mailssh, org, emailaddress,", "or implied. # See the License for the specific language", "AB # # Licensed under the Apache License, Version 2.0", "= pgdata2.replace('66666', '33333') # pay 333.33 SEK final_payment = pgdata3.replace('66666',", "emailaddress) with ssh() as client: with check_mails(client, mailssh, purchase): print(purchase.invoice)", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "assert '222,22' in msg # amount paid assert '444,44' in", "License. import sys if (sys.version_info >=(3, 0)): PYT3 = True", "-*- coding: utf-8 -*- # Copyright 2019 Open End AB", "= urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) data = {'phone':", "params = { 'data': [ {'items': [{'product': product} for product", "'buyerName': '<NAME>', 'buyerEmail': emailaddress} ] } if PYT3: req =", "check_mails(client, mailssh, purchase): check_mail(client, mailssh, purchase, 'order-confirmation') yield check_mail(client, mailssh,", "in range(20): if PYT3: req = urllib.request.Request(url) response = json.load(urllib.request.urlopen(req))", "urllib.request import urllib.parse else: PYT3 = False import urllib2 import", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "= json.load(urllib2.urlopen(req)) return Container(id=data['purchase'], invoice=data['invoiceUrl'], buyerEmail=emailaddress) def check_mail(client, mailssh, purchase,", "%s %s' % ( org.id, id_args[0], id_args[1]) id_args[0] += 1", "'222,22' in msg # 
amount paid assert '444,44' in msg", "'ssh', 'org', 'emailaddress') def test_partial_plusgiro_payment(ssh, mailssh, org, emailaddress, tmpdir): purchase", "emailaddress) with ssh() as client: with check_mails(client, mailssh, purchase): pgdata1", "product} for product in products], 'buyerName': '<NAME>', 'buyerEmail': emailaddress} ]", "# amount paid assert '111,11' in msg # amount remaining", "id_args=[1, 1]): cmd = 'python /root/accounting/members/paymentgen.py %s %s %s' %", "mailssh, purchase, mailtype): client.run('sendmail -qf') message, = mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail)", "amount paid assert '111,11' in msg # amount remaining upload_pg(tmpdir,", "(the \"License\"); # you may not use this file except", "else: req = urllib2.Request(url) response = json.load(urllib2.urlopen(req)) print(response) if response['status']", "= urllib.request.Request(url, json.dumps(data).encode('ascii'), {'Content-Type': 'application/json'}) response = json.load(urllib.request.urlopen(req)) else: parsed", "# you may not use this file except in compliance", "= tmpdir.join('pgfile') pgfile.write(pgdata) dest = uuid.uuid4() with ssh(username='nordea') as client:", "org) pgdata3 = gen_pg(client, org) # The sum is 66666", "url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) data =", "# The sum is 66666 (öre). 
It is probably unique", "222.22 SEK partial_payment2 = pgdata2.replace(b'66666', b'33333') # pay 333.33 SEK", "# final 111.11 SEK upload_pg(tmpdir, ssh, partial_payment1) msg, headers =", "assert '111,11' in msg # amount remaining upload_pg(tmpdir, ssh, final_payment)", "#py.test.skip('Skip swish tests until certificates work') purchase = do_purchase([org.product], emailaddress)", "req = urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'), json.dumps(params), {'Content-Type': 'application/json'}) data = json.load(urllib2.urlopen(req))", "ssh() as client: with check_mails(client, mailssh, purchase): print(purchase.invoice) if PYT3:", "= urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) for _ in", "'/rest/purchase'), json.dumps(params).encode('ascii'), {'Content-Type': 'application/json'}) data = json.load(urllib.request.urlopen(req)) else: req =", "'')) for _ in range(20): if PYT3: req = urllib.request.Request(url)", "# # Unless required by applicable law or agreed to", "+= 1 id_args[1] += 1000 stdin, stdout, stderr = client.exec_command('PYTHONPATH=/root/accounting", "print(response) assert response['status'] == 'CREATED' path = '/providers/swish/poll/%s/%s' % (org.swish_provider,", "pgdata2 = gen_pg(client, org) pgdata3 = gen_pg(client, org) # The", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "pgfile.write(pgdata) dest = uuid.uuid4() with ssh(username='nordea') as client: sftp =", "ssh() as client: with check_mails(client, mailssh, purchase): pgdata1 = gen_pg(client,", "with ssh(username='nordea') as client: sftp = client.open_sftp() sftp.put(str(pgfile), 'incoming/%s' %", "def test_full_plusgiro_payment(mailssh, ssh, org, emailaddress, tmpdir): purchase = do_purchase([org.product], emailaddress)", "Version 2.0 (the \"License\"); # you may not use this", "the License. 
import sys if (sys.version_info >=(3, 0)): PYT3 =", "req = urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'), json.dumps(params).encode('ascii'), {'Content-Type': 'application/json'}) data = json.load(urllib.request.urlopen(req))", "invoice=data['invoiceUrl'], buyerEmail=emailaddress) def check_mail(client, mailssh, purchase, mailtype): client.run('sendmail -qf') message,", "msg, headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '222,22' in", "{ 'data': [ {'items': [{'product': product} for product in products],", "# pay 333.33 SEK final_payment = pgdata3.replace('66666', '11111') # final", "json.dumps(params), {'Content-Type': 'application/json'}) data = json.load(urllib2.urlopen(req)) return Container(id=data['purchase'], invoice=data['invoiceUrl'], buyerEmail=emailaddress)", "'application/json'}) data = json.load(urllib2.urlopen(req)) return Container(id=data['purchase'], invoice=data['invoiceUrl'], buyerEmail=emailaddress) def check_mail(client,", "SEK else: partial_payment1 = pgdata1.replace('66666', '22222') # pay 222.22 SEK", "partial payments. if PYT3: partial_payment1 = pgdata1.replace(b'66666', b'22222') # pay", "subprocess import time import uuid from . import support here", "implied. 
# See the License for the specific language governing", "org) upload_pg(tmpdir, ssh, pgdata) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org',", "mailssh, purchase): pgdata1 = gen_pg(client, org) pgdata2 = gen_pg(client, org)", "under the Apache License, Version 2.0 (the \"License\"); # you", "= check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '222,22' in msg #", "the fake pgfile, # so we can simply replace it", "with ssh() as client: with check_mails(client, mailssh, purchase): print(purchase.invoice) if", "client.open_sftp() sftp.put(str(pgfile), 'incoming/%s' % dest, confirm=False) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh',", "assert purchase.invoice in msg return msg, headers @contextlib.contextmanager def check_mails(client,", "import time import uuid from . import support here =", "buyerEmail=emailaddress) def check_mail(client, mailssh, purchase, mailtype): client.run('sendmail -qf') message, =", "msg return msg, headers @contextlib.contextmanager def check_mails(client, mailssh, purchase): check_mail(client,", "by applicable law or agreed to in writing, software #", "= mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail) msg, headers = mailssh.parse(message) assert headers['X-OE-MailType']", "test_swish_payment(nodes, ssh, mailssh, org, emailaddress): #py.test.skip('Skip swish tests until certificates", "pay 222.22 SEK partial_payment2 = pgdata2.replace(b'66666', b'33333') # pay 333.33", "dest, confirm=False) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def", "uuid.uuid4() with ssh(username='nordea') as client: sftp = client.open_sftp() sftp.put(str(pgfile), 'incoming/%s'", "gen_pg(client, org) pgdata2 = gen_pg(client, org) pgdata3 = gen_pg(client, org)", "{'Content-Type': 'application/json'}) response = json.load(urllib.request.urlopen(req)) else: parsed = 
urlparse.urlparse(purchase.invoice) _,", "'mailssh', 'nodes', 'ssh', 'org', 'emailaddress') def test_swish_payment(nodes, ssh, mailssh, org,", "= '/providers/swish/poll/%s/%s' % (org.swish_provider, response['id']) if PYT3: url = urllib.parse.urlunparse((parsed.scheme,", "check_mails(client, mailssh, purchase): pgdata1 = gen_pg(client, org) pgdata2 = gen_pg(client,", "def test_swish_payment(nodes, ssh, mailssh, org, emailaddress): #py.test.skip('Skip swish tests until", "stdout, stderr = client.exec_command('PYTHONPATH=/root/accounting ' + cmd) return stdout.read() def", "SEK final_payment = pgdata3.replace('66666', '11111') # final 111.11 SEK upload_pg(tmpdir,", "= parsed.path.split('/') path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url =", "{'phone': '1231181189'} req = urllib.request.Request(url, json.dumps(data).encode('ascii'), {'Content-Type': 'application/json'}) response =", "assert headers['X-OE-MailType'] == [mailtype] assert purchase.invoice in msg return msg,", "data = {'phone': '1231181189'} req = urllib.request.Request(url, json.dumps(data).encode('ascii'), {'Content-Type': 'application/json'})", "json.dumps(data).encode('ascii'), {'Content-Type': 'application/json'}) response = json.load(urllib.request.urlopen(req)) else: parsed = urlparse.urlparse(purchase.invoice)", "return Container(id=data['purchase'], invoice=data['invoiceUrl'], buyerEmail=emailaddress) def check_mail(client, mailssh, purchase, mailtype): client.run('sendmail", "(org.swish_provider, purchase) url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', ''))", "pay 222.22 SEK partial_payment2 = pgdata2.replace('66666', '33333') # pay 333.33", "'partial-payment-confirmation') assert '333,33' in msg # amount paid assert '111,11'", "'/rest/purchase'), json.dumps(params), {'Content-Type': 'application/json'}) data = json.load(urllib2.urlopen(req)) return Container(id=data['purchase'], invoice=data['invoiceUrl'],", 
"test_full_plusgiro_payment(mailssh, ssh, org, emailaddress, tmpdir): purchase = do_purchase([org.product], emailaddress) with", "id_args[0], id_args[1]) id_args[0] += 1 id_args[1] += 1000 stdin, stdout,", "ssh, final_payment) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'nodes', 'ssh', 'org', 'emailaddress')", "id_args[0] += 1 id_args[1] += 1000 stdin, stdout, stderr =", "mailssh, org, emailaddress): #py.test.skip('Skip swish tests until certificates work') purchase", "mailssh, purchase, 'order-confirmation') yield check_mail(client, mailssh, purchase, 'full-payment-confirmation') def gen_pg(client,", "urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) data = {'phone': '1231181189'}", "mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail) msg, headers = mailssh.parse(message) assert headers['X-OE-MailType'] ==", "'', '', '')) data = {'phone': '1231181189'} req = urllib2.Request(url,", "msg, headers = mailssh.parse(message) assert headers['X-OE-MailType'] == [mailtype] assert purchase.invoice", "org, id_args=[1, 1]): cmd = 'python /root/accounting/members/paymentgen.py %s %s %s'", "= pgdata1.replace(b'66666', b'22222') # pay 222.22 SEK partial_payment2 = pgdata2.replace(b'66666',", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "gen_pg(client, org) pgdata3 = gen_pg(client, org) # The sum is", "parsed.netloc, path, '', '', '')) for _ in range(20): if", "fake pgfile, # so we can simply replace it in", "the specific language governing permissions and # limitations under the", "purchase, 'partial-payment-confirmation') assert '222,22' in msg # amount paid assert", "paid assert '444,44' in msg # amount remaining upload_pg(tmpdir, ssh,", "% (org.swish_provider, purchase) url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '',", "applicable law or agreed to in writing, software # distributed", "'1231181189'} req = 
urllib.request.Request(url, json.dumps(data).encode('ascii'), {'Content-Type': 'application/json'}) response = json.load(urllib.request.urlopen(req))", "gen_pg(client, org, id_args=[1, 1]): cmd = 'python /root/accounting/members/paymentgen.py %s %s", "if (sys.version_info >=(3, 0)): PYT3 = True import urllib.request import", "'<NAME>', 'buyerEmail': emailaddress} ] } if PYT3: req = urllib.request.Request(urllib.parse.urljoin(support.url,", "tmpdir.join('pgfile') pgfile.write(pgdata) dest = uuid.uuid4() with ssh(username='nordea') as client: sftp", "from . import support here = os.path.dirname(__file__) class Container(object): def", "'')) else: url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', ''))", "in writing, software # distributed under the License is distributed", "with check_mails(client, mailssh, purchase): pgdata = gen_pg(client, org) upload_pg(tmpdir, ssh,", "urllib.request.Request(url) response = json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(url) response =", "ssh, partial_payment1) msg, headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert", "limitations under the License. 
import sys if (sys.version_info >=(3, 0)):", "id_args[1]) id_args[0] += 1 id_args[1] += 1000 stdin, stdout, stderr", "1 id_args[1] += 1000 stdin, stdout, stderr = client.exec_command('PYTHONPATH=/root/accounting '", "partial_payment1 = pgdata1.replace('66666', '22222') # pay 222.22 SEK partial_payment2 =", "PYT3: parsed = urllib.parse.urlparse(purchase.invoice) _, _, purchase, _ = parsed.path.split('/')", "purchase, _ = parsed.path.split('/') path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase)", "SEK partial_payment2 = pgdata2.replace(b'66666', b'33333') # pay 333.33 SEK final_payment", "(org.swish_provider, response['id']) if PYT3: url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '',", "False import urllib2 import urlparse import contextlib import json import", "json.dumps(params).encode('ascii'), {'Content-Type': 'application/json'}) data = json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(urlparse.urljoin(support.url,", "= os.path.dirname(__file__) class Container(object): def __init__(self, **kw): self.__dict__.update(kw) def do_purchase(products,", "SEK upload_pg(tmpdir, ssh, partial_payment1) msg, headers = check_mail(client, mailssh, purchase,", "'333,33' in msg # amount paid assert '111,11' in msg", "'', '', '')) else: url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '',", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "client.exec_command('PYTHONPATH=/root/accounting ' + cmd) return stdout.read() def upload_pg(tmpdir, ssh, pgdata):", "Open End AB # # Licensed under the Apache License,", "( org.id, id_args[0], id_args[1]) id_args[0] += 1 id_args[1] += 1000", "License, Version 2.0 (the \"License\"); # you may not use", "import support here = os.path.dirname(__file__) class Container(object): def __init__(self, **kw):", "PYT3: partial_payment1 = pgdata1.replace(b'66666', b'22222') # pay 222.22 SEK partial_payment2", "0)): PYT3 = True import urllib.request import urllib.parse 
else: PYT3", "# You may obtain a copy of the License at", "b'11111') # final 111.11 SEK else: partial_payment1 = pgdata1.replace('66666', '22222')", "yield check_mail(client, mailssh, purchase, 'full-payment-confirmation') def gen_pg(client, org, id_args=[1, 1]):", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "66666 (öre). It is probably unique in the fake pgfile,", "purchase = do_purchase([org.product], emailaddress) with ssh() as client: with check_mails(client,", "PYT3: req = urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'), json.dumps(params).encode('ascii'), {'Content-Type': 'application/json'}) data =", "print(purchase.invoice) if PYT3: parsed = urllib.parse.urlparse(purchase.invoice) _, _, purchase, _", "def __init__(self, **kw): self.__dict__.update(kw) def do_purchase(products, emailaddress): params = {", "pgfile, # so we can simply replace it in order", "'', '')) else: url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '',", "products], 'buyerName': '<NAME>', 'buyerEmail': emailaddress} ] } if PYT3: req", "# amount paid assert '444,44' in msg # amount remaining", "headers = mailssh.parse(message) assert headers['X-OE-MailType'] == [mailtype] assert purchase.invoice in", "import uuid from . 
import support here = os.path.dirname(__file__) class", "in msg return msg, headers @contextlib.contextmanager def check_mails(client, mailssh, purchase):", "message, = mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail) msg, headers = mailssh.parse(message) assert", "pgdata = gen_pg(client, org) upload_pg(tmpdir, ssh, pgdata) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped',", "'33333') # pay 333.33 SEK final_payment = pgdata3.replace('66666', '11111') #", "the License for the specific language governing permissions and #", "upload_pg(tmpdir, ssh, pgdata) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress')", "emailaddress): params = { 'data': [ {'items': [{'product': product} for", "stdin, stdout, stderr = client.exec_command('PYTHONPATH=/root/accounting ' + cmd) return stdout.read()", "Apache License, Version 2.0 (the \"License\"); # you may not", "'TO', purchase.buyerEmail) msg, headers = mailssh.parse(message) assert headers['X-OE-MailType'] == [mailtype]", "either express or implied. # See the License for the", "partial_payment2 = pgdata2.replace('66666', '33333') # pay 333.33 SEK final_payment =", "under the License. 
import sys if (sys.version_info >=(3, 0)): PYT3", "'python /root/accounting/members/paymentgen.py %s %s %s' % ( org.id, id_args[0], id_args[1])", "{'Content-Type': 'application/json'}) data = json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'),", "'full-payment-confirmation') def gen_pg(client, org, id_args=[1, 1]): cmd = 'python /root/accounting/members/paymentgen.py", "final_payment = pgdata3.replace('66666', '11111') # final 111.11 SEK upload_pg(tmpdir, ssh,", "path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url = urlparse.urlunparse((parsed.scheme, parsed.netloc,", "req = urllib2.Request(url, json.dumps(data), {'Content-Type': 'application/json'}) response = json.load(urllib2.urlopen(req)) print(response)", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc,", "def test_partial_plusgiro_payment(ssh, mailssh, org, emailaddress, tmpdir): purchase = do_purchase([org.product], emailaddress)", "mailssh, purchase, 'full-payment-confirmation') def gen_pg(client, org, id_args=[1, 1]): cmd =", "'emailaddress') def test_full_plusgiro_payment(mailssh, ssh, org, emailaddress, tmpdir): purchase = do_purchase([org.product],", "amount paid assert '444,44' in msg # amount remaining upload_pg(tmpdir,", "ssh, pgdata) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def", "pay 333.33 SEK final_payment = pgdata3.replace('66666', '11111') # final 111.11", "mailssh, purchase): check_mail(client, mailssh, purchase, 'order-confirmation') yield check_mail(client, mailssh, purchase,", "It is probably unique in the fake pgfile, # so", "can simply replace it in order to make partial payments.", "product in products], 'buyerName': '<NAME>', 'buyerEmail': emailaddress} ] } if", 
"pgdata3 = gen_pg(client, org) # The sum is 66666 (öre).", "ssh, org, emailaddress, tmpdir): purchase = do_purchase([org.product], emailaddress) with ssh()", "stdout.read() def upload_pg(tmpdir, ssh, pgdata): pgfile = tmpdir.join('pgfile') pgfile.write(pgdata) dest", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "if PYT3: partial_payment1 = pgdata1.replace(b'66666', b'22222') # pay 222.22 SEK", "'org', 'emailaddress') def test_full_plusgiro_payment(mailssh, ssh, org, emailaddress, tmpdir): purchase =", "range(20): if PYT3: req = urllib.request.Request(url) response = json.load(urllib.request.urlopen(req)) else:", "= pgdata2.replace(b'66666', b'33333') # pay 333.33 SEK final_payment = pgdata3.replace(b'66666',", "urllib.parse else: PYT3 = False import urllib2 import urlparse import", "urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'), json.dumps(params).encode('ascii'), {'Content-Type': 'application/json'}) data = json.load(urllib.request.urlopen(req)) else: req", "so we can simply replace it in order to make", "= urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) data = {'phone':", "import urllib2 import urlparse import contextlib import json import os", "headers['X-OE-MailType'] == [mailtype] assert purchase.invoice in msg return msg, headers", "= uuid.uuid4() with ssh(username='nordea') as client: sftp = client.open_sftp() sftp.put(str(pgfile),", "mailssh, purchase): pgdata = gen_pg(client, org) upload_pg(tmpdir, ssh, pgdata) @py.test.mark.usefixtures('cluster',", "emailaddress) with ssh() as client: with check_mails(client, mailssh, purchase): pgdata", "in order to make partial payments. if PYT3: partial_payment1 =", "PYT3: url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) else:", "msg # amount paid assert '111,11' in msg # amount", "and # limitations under the License. 
import sys if (sys.version_info", "@py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'ssh', 'org', 'emailaddress') def test_partial_plusgiro_payment(ssh, mailssh,", "= urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) else: url =", "\"License\"); # you may not use this file except in", "coding: utf-8 -*- # Copyright 2019 Open End AB #", "parsed.path.split('/') path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url = urlparse.urlunparse((parsed.scheme,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "-*- # Copyright 2019 Open End AB # # Licensed", "= gen_pg(client, org) pgdata2 = gen_pg(client, org) pgdata3 = gen_pg(client,", "_, _, purchase, _ = parsed.path.split('/') path = '/providers/swish/charge/%s/%s' %", "(sys.version_info >=(3, 0)): PYT3 = True import urllib.request import urllib.parse", "= True import urllib.request import urllib.parse else: PYT3 = False", "in msg # amount paid assert '444,44' in msg #", "response = json.load(urllib.request.urlopen(req)) else: req = urllib2.Request(url) response = json.load(urllib2.urlopen(req))", "# distributed under the License is distributed on an \"AS", "json.load(urllib2.urlopen(req)) print(response) assert response['status'] == 'CREATED' path = '/providers/swish/poll/%s/%s' %", "headers @contextlib.contextmanager def check_mails(client, mailssh, purchase): check_mail(client, mailssh, purchase, 'order-confirmation')", "# Unless required by applicable law or agreed to in", "urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) else: url = urlparse.urlunparse((parsed.scheme,", "make partial payments. if PYT3: partial_payment1 = pgdata1.replace(b'66666', b'22222') #", "permissions and # limitations under the License. 
import sys if", "= do_purchase([org.product], emailaddress) with ssh() as client: with check_mails(client, mailssh,", "msg, headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '333,33' in", "for _ in range(20): if PYT3: req = urllib.request.Request(url) response", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "[ {'items': [{'product': product} for product in products], 'buyerName': '<NAME>',", "urllib.parse.urlparse(purchase.invoice) _, _, purchase, _ = parsed.path.split('/') path = '/providers/swish/charge/%s/%s'", "partial_payment2) msg, headers = check_mail(client, mailssh, purchase, 'partial-payment-confirmation') assert '333,33'", "You may obtain a copy of the License at #", "= gen_pg(client, org) upload_pg(tmpdir, ssh, pgdata) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh',", "final 111.11 SEK else: partial_payment1 = pgdata1.replace('66666', '22222') # pay", "for product in products], 'buyerName': '<NAME>', 'buyerEmail': emailaddress} ] }", "{'phone': '1231181189'} req = urllib2.Request(url, json.dumps(data), {'Content-Type': 'application/json'}) response =", "= '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase) url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path,", "'application/json'}) response = json.load(urllib2.urlopen(req)) print(response) assert response['status'] == 'CREATED' path", "def gen_pg(client, org, id_args=[1, 1]): cmd = 'python /root/accounting/members/paymentgen.py %s", "SEK partial_payment2 = pgdata2.replace('66666', '33333') # pay 333.33 SEK final_payment", "upload_pg(tmpdir, ssh, final_payment) @py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh', 'nodes', 'ssh', 'org',", "dest = uuid.uuid4() with ssh(username='nordea') as client: sftp = client.open_sftp()", "purchase) url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path, '', '', '')) data", "the Apache License, Version 2.0 (the \"License\"); # you 
may", "msg # amount remaining upload_pg(tmpdir, ssh, partial_payment2) msg, headers =", "= False import urllib2 import urlparse import contextlib import json", "} if PYT3: req = urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'), json.dumps(params).encode('ascii'), {'Content-Type': 'application/json'})", "client: sftp = client.open_sftp() sftp.put(str(pgfile), 'incoming/%s' % dest, confirm=False) @py.test.mark.usefixtures('cluster'," ]
[ "2020 InfAI (CC SES) Licensed under the Apache License, Version", "self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic), envelope.message, qos=conf.MQTTClient.qos ) except Exception as ex:", "Unless required by applicable law or agreed to in writing,", "by applicable law or agreed to in writing, software distributed", "import conf, get_logger, mqtt import threading import cc_lib logger =", "software distributed under the License is distributed on an \"AS", "distributed under the License is distributed on an \"AS IS\"", "Copyright 2020 InfAI (CC SES) Licensed under the Apache License,", "\"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic), envelope.message, qos=conf.MQTTClient.qos ) except Exception as ex: logger.error(ex)", "cc_lib.client.Client, mqtt_client: mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\", daemon=True) self.__cc = client self.__mqtt =", "CONDITIONS OF ANY KIND, either express or implied. See the", "Version 2.0 (the \"License\"); you may not use this file", "mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\", daemon=True) self.__cc = client self.__mqtt = mqtt_client def", "writing, software distributed under the License is distributed on an", "SES) Licensed under the Apache License, Version 2.0 (the \"License\");", "self.__cc.receive_fog_processes() logger.debug(envelope) self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic), envelope.message, qos=conf.MQTTClient.qos ) except Exception", "limitations under the License. 
\"\"\" __all__ = (\"Router\", ) from", "Router(threading.Thread): def __init__(self, client: cc_lib.client.Client, mqtt_client: mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\", daemon=True) self.__cc", "__init__(self, client: cc_lib.client.Client, mqtt_client: mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\", daemon=True) self.__cc = client", "not use this file except in compliance with the License.", "2.0 (the \"License\"); you may not use this file except", "Apache License, Version 2.0 (the \"License\"); you may not use", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "express or implied. See the License for the specific language", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "in compliance with the License. You may obtain a copy", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "you may not use this file except in compliance with", "\"\"\" __all__ = (\"Router\", ) from ..util import conf, get_logger,", ") from ..util import conf, get_logger, mqtt import threading import", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "the License. You may obtain a copy of the License", "agreed to in writing, software distributed under the License is", "from ..util import conf, get_logger, mqtt import threading import cc_lib", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "cc_lib logger = get_logger(__name__.split(\".\", 1)[-1]) class Router(threading.Thread): def __init__(self, client:", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "use this file except in compliance with the License. You", "License. 
\"\"\" __all__ = (\"Router\", ) from ..util import conf,", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "ANY KIND, either express or implied. See the License for", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "mqtt_client: mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\", daemon=True) self.__cc = client self.__mqtt = mqtt_client", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "= get_logger(__name__.split(\".\", 1)[-1]) class Router(threading.Thread): def __init__(self, client: cc_lib.client.Client, mqtt_client:", "import cc_lib logger = get_logger(__name__.split(\".\", 1)[-1]) class Router(threading.Thread): def __init__(self,", "permissions and limitations under the License. \"\"\" __all__ = (\"Router\",", "either express or implied. See the License for the specific", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "under the License. \"\"\" __all__ = (\"Router\", ) from ..util", "mqtt import threading import cc_lib logger = get_logger(__name__.split(\".\", 1)[-1]) class", "super().__init__(name=\"downstream-fog-processes-router\", daemon=True) self.__cc = client self.__mqtt = mqtt_client def run(self)", "under the License is distributed on an \"AS IS\" BASIS,", "\"License\"); you may not use this file except in compliance", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "logger = get_logger(__name__.split(\".\", 1)[-1]) class Router(threading.Thread): def __init__(self, client: cc_lib.client.Client,", "with the License. 
You may obtain a copy of the", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "License for the specific language governing permissions and limitations under", "logger.debug(envelope) self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic), envelope.message, qos=conf.MQTTClient.qos ) except Exception as", "None: try: while True: envelope = self.__cc.receive_fog_processes() logger.debug(envelope) self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic,", "try: while True: envelope = self.__cc.receive_fog_processes() logger.debug(envelope) self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic),", "client: cc_lib.client.Client, mqtt_client: mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\", daemon=True) self.__cc = client self.__mqtt", "this file except in compliance with the License. You may", "daemon=True) self.__cc = client self.__mqtt = mqtt_client def run(self) ->", "specific language governing permissions and limitations under the License. 
\"\"\"", "(the \"License\"); you may not use this file except in", "__all__ = (\"Router\", ) from ..util import conf, get_logger, mqtt", "class Router(threading.Thread): def __init__(self, client: cc_lib.client.Client, mqtt_client: mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\", daemon=True)", "applicable law or agreed to in writing, software distributed under", "get_logger, mqtt import threading import cc_lib logger = get_logger(__name__.split(\".\", 1)[-1])", "-> None: try: while True: envelope = self.__cc.receive_fog_processes() logger.debug(envelope) self.__mqtt.publish(", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "while True: envelope = self.__cc.receive_fog_processes() logger.debug(envelope) self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic), envelope.message,", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "the specific language governing permissions and limitations under the License.", "= self.__cc.receive_fog_processes() logger.debug(envelope) self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic), envelope.message, qos=conf.MQTTClient.qos ) except", "import threading import cc_lib logger = get_logger(__name__.split(\".\", 1)[-1]) class Router(threading.Thread):", "self.__mqtt = mqtt_client def run(self) -> None: try: while True:", "threading import cc_lib logger = get_logger(__name__.split(\".\", 1)[-1]) class Router(threading.Thread): def", "= client self.__mqtt = mqtt_client def run(self) -> None: try:", "the Apache License, Version 2.0 (the \"License\"); you may not", "file except in compliance with the License. You may obtain", "except in compliance with the License. You may obtain a", "or implied. See the License for the specific language governing", "KIND, either express or implied. 
See the License for the", "to in writing, software distributed under the License is distributed", "or agreed to in writing, software distributed under the License", "get_logger(__name__.split(\".\", 1)[-1]) class Router(threading.Thread): def __init__(self, client: cc_lib.client.Client, mqtt_client: mqtt.Client):", "law or agreed to in writing, software distributed under the", "OR CONDITIONS OF ANY KIND, either express or implied. See", "compliance with the License. You may obtain a copy of", "(\"Router\", ) from ..util import conf, get_logger, mqtt import threading", "OF ANY KIND, either express or implied. See the License", "under the Apache License, Version 2.0 (the \"License\"); you may", "the License. \"\"\" __all__ = (\"Router\", ) from ..util import", "True: envelope = self.__cc.receive_fog_processes() logger.debug(envelope) self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic), envelope.message, qos=conf.MQTTClient.qos", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "(CC SES) Licensed under the Apache License, Version 2.0 (the", "def run(self) -> None: try: while True: envelope = self.__cc.receive_fog_processes()", "run(self) -> None: try: while True: envelope = self.__cc.receive_fog_processes() logger.debug(envelope)", "License, Version 2.0 (the \"License\"); you may not use this", "= mqtt_client def run(self) -> None: try: while True: envelope", "1)[-1]) class Router(threading.Thread): def __init__(self, client: cc_lib.client.Client, mqtt_client: mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\",", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "mqtt_client def run(self) -> None: try: while True: envelope =", "for the specific language governing permissions and limitations under the", "See the License for the specific language governing permissions and", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "client 
self.__mqtt = mqtt_client def run(self) -> None: try: while", "conf, get_logger, mqtt import threading import cc_lib logger = get_logger(__name__.split(\".\",", "self.__cc = client self.__mqtt = mqtt_client def run(self) -> None:", "License. You may obtain a copy of the License at", "the License for the specific language governing permissions and limitations", "\"\"\" Copyright 2020 InfAI (CC SES) Licensed under the Apache", "may not use this file except in compliance with the", "InfAI (CC SES) Licensed under the Apache License, Version 2.0", "in writing, software distributed under the License is distributed on", "required by applicable law or agreed to in writing, software", "governing permissions and limitations under the License. \"\"\" __all__ =", "implied. See the License for the specific language governing permissions", "..util import conf, get_logger, mqtt import threading import cc_lib logger", "and limitations under the License. \"\"\" __all__ = (\"Router\", )", "envelope = self.__cc.receive_fog_processes() logger.debug(envelope) self.__mqtt.publish( \"{}/{}\".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic), envelope.message, qos=conf.MQTTClient.qos )", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "language governing permissions and limitations under the License. \"\"\" __all__", "= (\"Router\", ) from ..util import conf, get_logger, mqtt import", "def __init__(self, client: cc_lib.client.Client, mqtt_client: mqtt.Client): super().__init__(name=\"downstream-fog-processes-router\", daemon=True) self.__cc =" ]
[ "on 2021-05-27 13:34 from django.db import migrations, models class Migration(migrations.Migration):", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('user_profile', '0002_auto_20210526_1747'),", "by Django 3.2.3 on 2021-05-27 13:34 from django.db import migrations,", "[ ('user_profile', '0002_auto_20210526_1747'), ] operations = [ migrations.AddField( model_name='order', name='payment_method',", "Migration(migrations.Migration): dependencies = [ ('user_profile', '0002_auto_20210526_1747'), ] operations = [", "('user_profile', '0002_auto_20210526_1747'), ] operations = [ migrations.AddField( model_name='order', name='payment_method', field=models.CharField(choices=[('cash',", "models class Migration(migrations.Migration): dependencies = [ ('user_profile', '0002_auto_20210526_1747'), ] operations", "migrations.AddField( model_name='order', name='payment_method', field=models.CharField(choices=[('cash', 'cash'), ('wallet', 'wallet')], default='cash', max_length=10), ),", "[ migrations.AddField( model_name='order', name='payment_method', field=models.CharField(choices=[('cash', 'cash'), ('wallet', 'wallet')], default='cash', max_length=10),", "= [ migrations.AddField( model_name='order', name='payment_method', field=models.CharField(choices=[('cash', 'cash'), ('wallet', 'wallet')], default='cash',", "migrations, models class Migration(migrations.Migration): dependencies = [ ('user_profile', '0002_auto_20210526_1747'), ]", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "model_name='order', name='payment_method', field=models.CharField(choices=[('cash', 'cash'), ('wallet', 'wallet')], default='cash', max_length=10), ), ]", "'0002_auto_20210526_1747'), ] operations = [ migrations.AddField( model_name='order', name='payment_method', field=models.CharField(choices=[('cash', 'cash'),", "3.2.3 on 2021-05-27 13:34 from django.db import migrations, models class", "dependencies = [ 
('user_profile', '0002_auto_20210526_1747'), ] operations = [ migrations.AddField(", "] operations = [ migrations.AddField( model_name='order', name='payment_method', field=models.CharField(choices=[('cash', 'cash'), ('wallet',", "Django 3.2.3 on 2021-05-27 13:34 from django.db import migrations, models", "2021-05-27 13:34 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "operations = [ migrations.AddField( model_name='order', name='payment_method', field=models.CharField(choices=[('cash', 'cash'), ('wallet', 'wallet')],", "# Generated by Django 3.2.3 on 2021-05-27 13:34 from django.db", "Generated by Django 3.2.3 on 2021-05-27 13:34 from django.db import", "13:34 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "= [ ('user_profile', '0002_auto_20210526_1747'), ] operations = [ migrations.AddField( model_name='order',", "class Migration(migrations.Migration): dependencies = [ ('user_profile', '0002_auto_20210526_1747'), ] operations =", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('user_profile'," ]
[ "TestCheckingTLSAddresses(unittest.TestCase): cases = ( ('abba[mnop]qrst', True), ('abcd[bddb]xyyx', False), ('aaaa[qwer]tyui', False),", "def test_finds_tls_addresses(self): for text, expected in self.cases: self.assertEqual(supports_tls(text), expected) def", "'qrst', 'defg']), ) def test_finds_allowed_substrings(self): for text, expected in self.cases:", "False), ) def test_finds_abba_sequences(self): for text, expected in self.cases: self.assertEqual(has_abba(text),", "('abcd[bddb]xyyx', False), ('aaaa[qwer]tyui', False), ('ioxxoj[asdfgh]zxcvbn', True), ) def test_finds_tls_addresses(self): for", "= [x[0] for x in self.cases] self.assertEqual(count_tls_addresses(data), 2) class TestFindingABASequences(unittest.TestCase):", "test_finds_allowed_substrings(self): for text, expected in self.cases: self.assertEqual(get_abba_allowed_strings(text), expected) class TestGettingDisallowedChunks(unittest.TestCase):", "for text, expected in self.cases: self.assertEqual(supports_tls(text), expected) def test_counts_tls_addresses(self): data", "TestGettingDisallowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']), ) def test_finds_disallowed_substrings(self):", "expected) class TestCheckingTLSAddresses(unittest.TestCase): cases = ( ('abba[mnop]qrst', True), ('abcd[bddb]xyyx', False),", "cases = ( ('aba', ['aba']), ('xyxxyx', ['xyx']), ('aaakekeke', ['eke', 'kek']),", "def test_finds_disallowed_substrings(self): for text, expected in self.cases: self.assertEqual(get_abba_disallowed_strings(text), expected) class", "x in self.cases] self.assertEqual(count_tls_addresses(data), 2) class TestFindingABASequences(unittest.TestCase): cases = (", "'kek']), ('zazbzbzbcdb', ['bzb', 'zaz', 'zbz']), ) def test_finds_aba_sequences(self): for text,", "['mnop', 'abcd']), ) def test_finds_disallowed_substrings(self): for text, expected in self.cases:", "( ('aba', ['aba']), ('xyxxyx', ['xyx']), ('aaakekeke', ['eke', 'kek']), ('zazbzbzbcdb', ['bzb',", 
"TestFindingABBASequences(unittest.TestCase): cases = ( ('abba', True), ('oxyyxo', True), ('aaaa', False),", "('abcd', False), ) def test_finds_abba_sequences(self): for text, expected in self.cases:", "from day07 import has_abba, get_abba_allowed_strings, get_abba_disallowed_strings from day07 import supports_tls,", "True), ) def test_finds_tls_addresses(self): for text, expected in self.cases: self.assertEqual(supports_tls(text),", "expected in self.cases: self.assertEqual(find_abas(text), expected) class TestCheckingSSLAddresses(unittest.TestCase): cases = (", "('xyxxyx', ['xyx']), ('aaakekeke', ['eke', 'kek']), ('zazbzbzbcdb', ['bzb', 'zaz', 'zbz']), )", "supports_tls, count_tls_addresses from day07 import find_abas, supports_ssl, count_ssl_addresses class TestFindingABBASequences(unittest.TestCase):", "for x in self.cases] self.assertEqual(count_ssl_addresses(data), 3) if __name__ == '__main__':", "def test_finds_abba_sequences(self): for text, expected in self.cases: self.assertEqual(has_abba(text), expected) class", "True), ('oxyyxo', True), ('aaaa', False), ('abcd', False), ) def test_finds_abba_sequences(self):", "for text, expected in self.cases: self.assertEqual(has_abba(text), expected) class TestGettingAllowedChunks(unittest.TestCase): cases", "self.assertEqual(get_abba_allowed_strings(text), expected) class TestGettingDisallowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']),", "self.cases: self.assertEqual(find_abas(text), expected) class TestCheckingSSLAddresses(unittest.TestCase): cases = ( ('aba[bab]xyz', True),", "= ( ('aba[bab]xyz', True), ('xyx[xyx]xyx', False), ('aaa[kek]eke', True), ('zazbz[bzb]cdb', True),", "True), ('abcd[bddb]xyyx', False), ('aaaa[qwer]tyui', False), ('ioxxoj[asdfgh]zxcvbn', True), ) def test_finds_tls_addresses(self):", "'defg']), ) def test_finds_allowed_substrings(self): for text, expected in self.cases: self.assertEqual(get_abba_allowed_strings(text),", "def 
test_counts_ssl_addresses(self): data = [x[0] for x in self.cases] self.assertEqual(count_ssl_addresses(data),", "import unittest from day07 import has_abba, get_abba_allowed_strings, get_abba_disallowed_strings from day07", "('aaaa', False), ('abcd', False), ) def test_finds_abba_sequences(self): for text, expected", ") def test_finds_allowed_substrings(self): for text, expected in self.cases: self.assertEqual(get_abba_allowed_strings(text), expected)", "self.assertEqual(count_tls_addresses(data), 2) class TestFindingABASequences(unittest.TestCase): cases = ( ('aba', ['aba']), ('xyxxyx',", "in self.cases: self.assertEqual(supports_ssl(text), expected) def test_counts_ssl_addresses(self): data = [x[0] for", "'zaz', 'zbz']), ) def test_finds_aba_sequences(self): for text, expected in self.cases:", "test_counts_ssl_addresses(self): data = [x[0] for x in self.cases] self.assertEqual(count_ssl_addresses(data), 3)", "expected in self.cases: self.assertEqual(supports_ssl(text), expected) def test_counts_ssl_addresses(self): data = [x[0]", "('aaa[kek]eke', True), ('zazbz[bzb]cdb', True), ) def test_finds_ssl_addresses(self): for text, expected", "import find_abas, supports_ssl, count_ssl_addresses class TestFindingABBASequences(unittest.TestCase): cases = ( ('abba',", "class TestGettingDisallowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']), ) def", "x in self.cases] self.assertEqual(count_ssl_addresses(data), 3) if __name__ == '__main__': unittest.main()", "from day07 import find_abas, supports_ssl, count_ssl_addresses class TestFindingABBASequences(unittest.TestCase): cases =", "self.cases: self.assertEqual(supports_tls(text), expected) def test_counts_tls_addresses(self): data = [x[0] for x", "('oxyyxo', True), ('aaaa', False), ('abcd', False), ) def test_finds_abba_sequences(self): for", "in self.cases] self.assertEqual(count_tls_addresses(data), 2) class TestFindingABASequences(unittest.TestCase): cases = ( ('aba',", "in 
self.cases: self.assertEqual(get_abba_disallowed_strings(text), expected) class TestCheckingTLSAddresses(unittest.TestCase): cases = ( ('abba[mnop]qrst',", "= ( ('abba', True), ('oxyyxo', True), ('aaaa', False), ('abcd', False),", "cases = ( ('abba', True), ('oxyyxo', True), ('aaaa', False), ('abcd',", "self.cases: self.assertEqual(supports_ssl(text), expected) def test_counts_ssl_addresses(self): data = [x[0] for x", "for text, expected in self.cases: self.assertEqual(get_abba_disallowed_strings(text), expected) class TestCheckingTLSAddresses(unittest.TestCase): cases", "['eke', 'kek']), ('zazbzbzbcdb', ['bzb', 'zaz', 'zbz']), ) def test_finds_aba_sequences(self): for", "def test_counts_tls_addresses(self): data = [x[0] for x in self.cases] self.assertEqual(count_tls_addresses(data),", "text, expected in self.cases: self.assertEqual(has_abba(text), expected) class TestGettingAllowedChunks(unittest.TestCase): cases =", "TestFindingABASequences(unittest.TestCase): cases = ( ('aba', ['aba']), ('xyxxyx', ['xyx']), ('aaakekeke', ['eke',", "for text, expected in self.cases: self.assertEqual(get_abba_allowed_strings(text), expected) class TestGettingDisallowedChunks(unittest.TestCase): cases", "['bzb', 'zaz', 'zbz']), ) def test_finds_aba_sequences(self): for text, expected in", "('zazbz[bzb]cdb', True), ) def test_finds_ssl_addresses(self): for text, expected in self.cases:", "('aaaa[qwer]tyui', False), ('ioxxoj[asdfgh]zxcvbn', True), ) def test_finds_tls_addresses(self): for text, expected", "find_abas, supports_ssl, count_ssl_addresses class TestFindingABBASequences(unittest.TestCase): cases = ( ('abba', True),", "for text, expected in self.cases: self.assertEqual(supports_ssl(text), expected) def test_counts_ssl_addresses(self): data", "('abba[mnop]qrst', True), ('abcd[bddb]xyyx', False), ('aaaa[qwer]tyui', False), ('ioxxoj[asdfgh]zxcvbn', True), ) def", "= ( ('abba[mnop]qrst[abcd]defg', ['abba', 'qrst', 'defg']), ) def test_finds_allowed_substrings(self): for", 
"expected) class TestGettingDisallowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']), )", "self.assertEqual(supports_tls(text), expected) def test_counts_tls_addresses(self): data = [x[0] for x in", "test_finds_ssl_addresses(self): for text, expected in self.cases: self.assertEqual(supports_ssl(text), expected) def test_counts_ssl_addresses(self):", "['abba', 'qrst', 'defg']), ) def test_finds_allowed_substrings(self): for text, expected in", "expected in self.cases: self.assertEqual(has_abba(text), expected) class TestGettingAllowedChunks(unittest.TestCase): cases = (", "self.cases: self.assertEqual(get_abba_allowed_strings(text), expected) class TestGettingDisallowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['mnop',", "#!/usr/bin/env python import unittest from day07 import has_abba, get_abba_allowed_strings, get_abba_disallowed_strings", "python import unittest from day07 import has_abba, get_abba_allowed_strings, get_abba_disallowed_strings from", "self.cases: self.assertEqual(get_abba_disallowed_strings(text), expected) class TestCheckingTLSAddresses(unittest.TestCase): cases = ( ('abba[mnop]qrst', True),", "('xyx[xyx]xyx', False), ('aaa[kek]eke', True), ('zazbz[bzb]cdb', True), ) def test_finds_ssl_addresses(self): for", "self.assertEqual(get_abba_disallowed_strings(text), expected) class TestCheckingTLSAddresses(unittest.TestCase): cases = ( ('abba[mnop]qrst', True), ('abcd[bddb]xyyx',", "text, expected in self.cases: self.assertEqual(supports_tls(text), expected) def test_counts_tls_addresses(self): data =", "['aba']), ('xyxxyx', ['xyx']), ('aaakekeke', ['eke', 'kek']), ('zazbzbzbcdb', ['bzb', 'zaz', 'zbz']),", "day07 import supports_tls, count_tls_addresses from day07 import find_abas, supports_ssl, count_ssl_addresses", "unittest from day07 import has_abba, get_abba_allowed_strings, get_abba_disallowed_strings from day07 import", "('zazbzbzbcdb', ['bzb', 'zaz', 'zbz']), ) def 
test_finds_aba_sequences(self): for text, expected", "('ioxxoj[asdfgh]zxcvbn', True), ) def test_finds_tls_addresses(self): for text, expected in self.cases:", "text, expected in self.cases: self.assertEqual(get_abba_allowed_strings(text), expected) class TestGettingDisallowedChunks(unittest.TestCase): cases =", "import has_abba, get_abba_allowed_strings, get_abba_disallowed_strings from day07 import supports_tls, count_tls_addresses from", "get_abba_disallowed_strings from day07 import supports_tls, count_tls_addresses from day07 import find_abas,", "[x[0] for x in self.cases] self.assertEqual(count_tls_addresses(data), 2) class TestFindingABASequences(unittest.TestCase): cases", "self.assertEqual(supports_ssl(text), expected) def test_counts_ssl_addresses(self): data = [x[0] for x in", "expected) class TestGettingAllowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['abba', 'qrst', 'defg']),", "text, expected in self.cases: self.assertEqual(get_abba_disallowed_strings(text), expected) class TestCheckingTLSAddresses(unittest.TestCase): cases =", "in self.cases: self.assertEqual(supports_tls(text), expected) def test_counts_tls_addresses(self): data = [x[0] for", "def test_finds_aba_sequences(self): for text, expected in self.cases: self.assertEqual(find_abas(text), expected) class", "in self.cases: self.assertEqual(find_abas(text), expected) class TestCheckingSSLAddresses(unittest.TestCase): cases = ( ('aba[bab]xyz',", "False), ('aaa[kek]eke', True), ('zazbz[bzb]cdb', True), ) def test_finds_ssl_addresses(self): for text,", "test_counts_tls_addresses(self): data = [x[0] for x in self.cases] self.assertEqual(count_tls_addresses(data), 2)", "class TestCheckingSSLAddresses(unittest.TestCase): cases = ( ('aba[bab]xyz', True), ('xyx[xyx]xyx', False), ('aaa[kek]eke',", "day07 import find_abas, supports_ssl, count_ssl_addresses class TestFindingABBASequences(unittest.TestCase): cases = (", "test_finds_disallowed_substrings(self): for text, expected in 
self.cases: self.assertEqual(get_abba_disallowed_strings(text), expected) class TestCheckingTLSAddresses(unittest.TestCase):", "from day07 import supports_tls, count_tls_addresses from day07 import find_abas, supports_ssl,", "count_ssl_addresses class TestFindingABBASequences(unittest.TestCase): cases = ( ('abba', True), ('oxyyxo', True),", "cases = ( ('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']), ) def test_finds_disallowed_substrings(self): for", "= ( ('abba[mnop]qrst', True), ('abcd[bddb]xyyx', False), ('aaaa[qwer]tyui', False), ('ioxxoj[asdfgh]zxcvbn', True),", "True), ('zazbz[bzb]cdb', True), ) def test_finds_ssl_addresses(self): for text, expected in", "('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']), ) def test_finds_disallowed_substrings(self): for text, expected in", "TestCheckingSSLAddresses(unittest.TestCase): cases = ( ('aba[bab]xyz', True), ('xyx[xyx]xyx', False), ('aaa[kek]eke', True),", "expected in self.cases: self.assertEqual(get_abba_disallowed_strings(text), expected) class TestCheckingTLSAddresses(unittest.TestCase): cases = (", "has_abba, get_abba_allowed_strings, get_abba_disallowed_strings from day07 import supports_tls, count_tls_addresses from day07", "expected in self.cases: self.assertEqual(get_abba_allowed_strings(text), expected) class TestGettingDisallowedChunks(unittest.TestCase): cases = (", "expected) def test_counts_ssl_addresses(self): data = [x[0] for x in self.cases]", "( ('abba', True), ('oxyyxo', True), ('aaaa', False), ('abcd', False), )", "False), ('aaaa[qwer]tyui', False), ('ioxxoj[asdfgh]zxcvbn', True), ) def test_finds_tls_addresses(self): for text,", "( ('abba[mnop]qrst[abcd]defg', ['abba', 'qrst', 'defg']), ) def test_finds_allowed_substrings(self): for text,", "class TestCheckingTLSAddresses(unittest.TestCase): cases = ( ('abba[mnop]qrst', True), ('abcd[bddb]xyyx', False), ('aaaa[qwer]tyui',", "self.assertEqual(has_abba(text), expected) class TestGettingAllowedChunks(unittest.TestCase): cases = ( 
('abba[mnop]qrst[abcd]defg', ['abba', 'qrst',", "supports_ssl, count_ssl_addresses class TestFindingABBASequences(unittest.TestCase): cases = ( ('abba', True), ('oxyyxo',", "import supports_tls, count_tls_addresses from day07 import find_abas, supports_ssl, count_ssl_addresses class", "class TestFindingABBASequences(unittest.TestCase): cases = ( ('abba', True), ('oxyyxo', True), ('aaaa',", "('aaakekeke', ['eke', 'kek']), ('zazbzbzbcdb', ['bzb', 'zaz', 'zbz']), ) def test_finds_aba_sequences(self):", "'zbz']), ) def test_finds_aba_sequences(self): for text, expected in self.cases: self.assertEqual(find_abas(text),", "data = [x[0] for x in self.cases] self.assertEqual(count_tls_addresses(data), 2) class", "False), ('ioxxoj[asdfgh]zxcvbn', True), ) def test_finds_tls_addresses(self): for text, expected in", "test_finds_tls_addresses(self): for text, expected in self.cases: self.assertEqual(supports_tls(text), expected) def test_counts_tls_addresses(self):", "self.assertEqual(find_abas(text), expected) class TestCheckingSSLAddresses(unittest.TestCase): cases = ( ('aba[bab]xyz', True), ('xyx[xyx]xyx',", "TestGettingAllowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['abba', 'qrst', 'defg']), ) def", "cases = ( ('aba[bab]xyz', True), ('xyx[xyx]xyx', False), ('aaa[kek]eke', True), ('zazbz[bzb]cdb',", "text, expected in self.cases: self.assertEqual(find_abas(text), expected) class TestCheckingSSLAddresses(unittest.TestCase): cases =", "in self.cases: self.assertEqual(get_abba_allowed_strings(text), expected) class TestGettingDisallowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg',", "def test_finds_ssl_addresses(self): for text, expected in self.cases: self.assertEqual(supports_ssl(text), expected) def", "cases = ( ('abba[mnop]qrst[abcd]defg', ['abba', 'qrst', 'defg']), ) def test_finds_allowed_substrings(self):", "'abcd']), ) def test_finds_disallowed_substrings(self): for text, expected in self.cases: 
self.assertEqual(get_abba_disallowed_strings(text),", "['xyx']), ('aaakekeke', ['eke', 'kek']), ('zazbzbzbcdb', ['bzb', 'zaz', 'zbz']), ) def", "expected) def test_counts_tls_addresses(self): data = [x[0] for x in self.cases]", "class TestGettingAllowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['abba', 'qrst', 'defg']), )", "cases = ( ('abba[mnop]qrst', True), ('abcd[bddb]xyyx', False), ('aaaa[qwer]tyui', False), ('ioxxoj[asdfgh]zxcvbn',", "day07 import has_abba, get_abba_allowed_strings, get_abba_disallowed_strings from day07 import supports_tls, count_tls_addresses", "('aba', ['aba']), ('xyxxyx', ['xyx']), ('aaakekeke', ['eke', 'kek']), ('zazbzbzbcdb', ['bzb', 'zaz',", "= ( ('aba', ['aba']), ('xyxxyx', ['xyx']), ('aaakekeke', ['eke', 'kek']), ('zazbzbzbcdb',", "get_abba_allowed_strings, get_abba_disallowed_strings from day07 import supports_tls, count_tls_addresses from day07 import", "True), ('xyx[xyx]xyx', False), ('aaa[kek]eke', True), ('zazbz[bzb]cdb', True), ) def test_finds_ssl_addresses(self):", "( ('aba[bab]xyz', True), ('xyx[xyx]xyx', False), ('aaa[kek]eke', True), ('zazbz[bzb]cdb', True), )", "for text, expected in self.cases: self.assertEqual(find_abas(text), expected) class TestCheckingSSLAddresses(unittest.TestCase): cases", "self.cases: self.assertEqual(has_abba(text), expected) class TestGettingAllowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg', ['abba',", "test_finds_aba_sequences(self): for text, expected in self.cases: self.assertEqual(find_abas(text), expected) class TestCheckingSSLAddresses(unittest.TestCase):", ") def test_finds_ssl_addresses(self): for text, expected in self.cases: self.assertEqual(supports_ssl(text), expected)", "text, expected in self.cases: self.assertEqual(supports_ssl(text), expected) def test_counts_ssl_addresses(self): data =", "def test_finds_allowed_substrings(self): for text, expected in self.cases: self.assertEqual(get_abba_allowed_strings(text), expected) class", "= ( 
('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']), ) def test_finds_disallowed_substrings(self): for text,", "test_finds_abba_sequences(self): for text, expected in self.cases: self.assertEqual(has_abba(text), expected) class TestGettingAllowedChunks(unittest.TestCase):", "<reponame>mpirnat/aoc2016 #!/usr/bin/env python import unittest from day07 import has_abba, get_abba_allowed_strings,", "class TestFindingABASequences(unittest.TestCase): cases = ( ('aba', ['aba']), ('xyxxyx', ['xyx']), ('aaakekeke',", "in self.cases: self.assertEqual(has_abba(text), expected) class TestGettingAllowedChunks(unittest.TestCase): cases = ( ('abba[mnop]qrst[abcd]defg',", ") def test_finds_aba_sequences(self): for text, expected in self.cases: self.assertEqual(find_abas(text), expected)", ") def test_finds_tls_addresses(self): for text, expected in self.cases: self.assertEqual(supports_tls(text), expected)", "True), ) def test_finds_ssl_addresses(self): for text, expected in self.cases: self.assertEqual(supports_ssl(text),", "True), ('aaaa', False), ('abcd', False), ) def test_finds_abba_sequences(self): for text,", "2) class TestFindingABASequences(unittest.TestCase): cases = ( ('aba', ['aba']), ('xyxxyx', ['xyx']),", "for x in self.cases] self.assertEqual(count_tls_addresses(data), 2) class TestFindingABASequences(unittest.TestCase): cases =", ") def test_finds_abba_sequences(self): for text, expected in self.cases: self.assertEqual(has_abba(text), expected)", "count_tls_addresses from day07 import find_abas, supports_ssl, count_ssl_addresses class TestFindingABBASequences(unittest.TestCase): cases", "('abba', True), ('oxyyxo', True), ('aaaa', False), ('abcd', False), ) def", "self.cases] self.assertEqual(count_tls_addresses(data), 2) class TestFindingABASequences(unittest.TestCase): cases = ( ('aba', ['aba']),", "('aba[bab]xyz', True), ('xyx[xyx]xyx', False), ('aaa[kek]eke', True), ('zazbz[bzb]cdb', True), ) def", "False), ('abcd', False), ) def test_finds_abba_sequences(self): for 
text, expected in", "data = [x[0] for x in self.cases] self.assertEqual(count_ssl_addresses(data), 3) if", "= [x[0] for x in self.cases] self.assertEqual(count_ssl_addresses(data), 3) if __name__", "expected) class TestCheckingSSLAddresses(unittest.TestCase): cases = ( ('aba[bab]xyz', True), ('xyx[xyx]xyx', False),", ") def test_finds_disallowed_substrings(self): for text, expected in self.cases: self.assertEqual(get_abba_disallowed_strings(text), expected)", "( ('abba[mnop]qrst', True), ('abcd[bddb]xyyx', False), ('aaaa[qwer]tyui', False), ('ioxxoj[asdfgh]zxcvbn', True), )", "('abba[mnop]qrst[abcd]defg', ['abba', 'qrst', 'defg']), ) def test_finds_allowed_substrings(self): for text, expected", "expected in self.cases: self.assertEqual(supports_tls(text), expected) def test_counts_tls_addresses(self): data = [x[0]", "( ('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']), ) def test_finds_disallowed_substrings(self): for text, expected", "[x[0] for x in self.cases] self.assertEqual(count_ssl_addresses(data), 3) if __name__ ==" ]
[ "as F from torch.autograd import Variable from basenets.MLP import MLP", "keepdim=True).detach() return x # TODO: support multi-layer value function in", "outscaler, initializer, initializer_param=initializer_param, ) def forward(self, x, other_data=None): x =", "= [32, 32, 32], # hidden unit number list nonlinear", "F.softmax, outscaler = None, initializer=\"xavier\", initializer_param={} ): self.n_actions = n_actions", "initializer_param=initializer_param, ) self.logstd = nn.Parameter(torch.log(sigma * torch.ones(n_actions) + 1e-8)) def", "False, outactive = None, outscaler = None, initializer = \"orthogonal\",", "dim k_sizes, channels, strides, fcs, nonlinear, usebn, outactive, outscaler, initializer,", "forward(self,x, other_data = None): x = MLP.forward(self, x, other_data) #", "exploration, we need to make sure that the std is", "function in which action is concat before the final layer", "= None): self.logstd.cuda() return self._apply(lambda t: t.cuda(device)) class FCPG_Softmax(MLP): def", "is concat before the final layer class FCVALUE(MLP): def __init__(self,", "torch.autograd import Variable from basenets.MLP import MLP from basenets.Conv import", "other_data) # for exploration, we need to make sure that", "Conv.forward(self, x, other_data) # for exploration, and similar to e-greedy", "outactive = None, outscaler = None, initializer = \"orthogonal\", initializer_param", "usebn = False, outactive = F.softmax, outscaler = None, initializer=\"xavier\",", "n_inputfeats, n_hiddens = [30], nonlinear = F.tanh, usebn = False,", "initializer_param = {\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions = n_actions super(FCPG_Gaussian,", "x = x / torch.sum(x, dim=-1, keepdim=True).detach() return x #", "usebn, outactive, outscaler, initializer, initializer_param=initializer_param, ) self.logstd = nn.Parameter(torch.log(sigma *", "FCPG_Gaussian(MLP): def __init__(self, n_inputfeats, n_actions, sigma, n_hiddens = [30], nonlinear", "x = Conv.forward(self, x, 
other_data) # for exploration, and similar", "forward(self, x, other_data=None): x = MLP.forward(self, x, other_data) # for", "16], strides = [4, 2, 2], fcs = [32, 32,", "class FCPG_Softmax(MLP): def __init__(self, n_inputfeats, # input dim n_actions, #", "x = x + 0.01 / self.n_actions x = x", "support multi-layer value function in which action is concat before", "def __init__(self, n_inputfeats, n_actions, sigma, n_hiddens = [30], nonlinear =", "* torch.ones(n_actions) + 1e-8)) def forward(self,x, other_data = None): x", "output dim n_hiddens, # hidden unit number list nonlinear, usebn,", "= n_actions super(FCPG_Gaussian, self).__init__( n_inputfeats, # input dim n_actions, #", "too low. logstd = torch.clamp(self.logstd, min = np.log(0.1)) return x,", "F.tanh, usebn = False, outactive = None, outscaler = None,", "n_actions, # output dim n_hiddens = [10], # hidden unit", "= n_actions super(ConvPG_Softmax, self).__init__( n_inputfeats, # input dim n_actions, #", "logstd = torch.clamp(self.logstd, min = np.log(0.1)) return x, logstd.expand_as(x), torch.exp(logstd).expand_as(x)", "list nonlinear = F.relu, usebn = False, outactive = F.softmax,", "x / torch.sum(x, dim=-1, keepdim=True).detach() return x # TODO: support", "layer class FCVALUE(MLP): def __init__(self, n_inputfeats, n_hiddens = [30], nonlinear", "number list nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, ) def", "n_actions, sigma, n_hiddens = [30], nonlinear = F.tanh, usebn =", "+ 1e-8)) def forward(self,x, other_data = None): x = MLP.forward(self,", "dim n_actions, # output dim n_hiddens, # hidden unit number", "2], fcs = [32, 32, 32], # hidden unit number", "Conv from torch import nn class FCPG_Gaussian(MLP): def __init__(self, n_inputfeats,", "# input dim n_actions, # output dim n_hiddens = [10],", "self.n_actions x = x / torch.sum(x, dim = -1, keepdim=True).detach()", "= None, outscaler = None, initializer = \"orthogonal\", initializer_param =", "+ 0.01 / 
self.n_actions x = x / torch.sum(x, dim", "fcs = [32, 32, 32], # hidden unit number list", "F from torch.autograd import Variable from basenets.MLP import MLP from", "n_actions, # output dim k_sizes, channels, strides, fcs, nonlinear, usebn,", "initializer_param={} ): self.n_actions = n_actions super(ConvPG_Softmax, self).__init__( n_inputfeats, # input", "other_data=None): x = Conv.forward(self, x, other_data) # for exploration, and", "multi-layer value function in which action is concat before the", "action is concat before the final layer class FCVALUE(MLP): def", "= [8, 16, 16], strides = [4, 2, 2], fcs", "n_inputfeats, 1, n_hiddens, nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, )", "torch.exp(logstd).expand_as(x) def cuda(self, device = None): self.logstd.cuda() return self._apply(lambda t:", "dim n_hiddens = [10], # hidden unit number list nonlinear", "= x / torch.sum(x, dim = -1, keepdim=True).detach() return x", "usebn = False, outactive = None, outscaler = None, initializer", "16, 16], strides = [4, 2, 2], fcs = [32,", "output dim k_sizes = [8, 4, 3], channels = [8,", "def __init__(self, n_inputfeats, # input dim n_actions, # output dim", "x, logstd.expand_as(x), torch.exp(logstd).expand_as(x) def cuda(self, device = None): self.logstd.cuda() return", "32, 32], # hidden unit number list nonlinear = F.relu,", "self._apply(lambda t: t.cuda(device)) class FCPG_Softmax(MLP): def __init__(self, n_inputfeats, # input", "outactive = F.softmax, outscaler = None, initializer = \"orthogonal\", initializer_param", "# hidden unit number list nonlinear = F.relu, usebn =", "n_inputfeats, # input dim n_actions, # output dim k_sizes, channels,", "hidden unit number list nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param,", ") self.logstd = nn.Parameter(torch.log(sigma * torch.ones(n_actions) + 1e-8)) def forward(self,x,", "n_actions, # output dim n_hiddens, # hidden unit number list", 
"outscaler, initializer, initializer_param=initializer_param, ) self.logstd = nn.Parameter(torch.log(sigma * torch.ones(n_actions) +", "list nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, ) self.logstd =", "= False, outactive = F.softmax, outscaler = None, initializer=\"xavier\", initializer_param={}", "not too low. logstd = torch.clamp(self.logstd, min = np.log(0.1)) return", "= None, initializer = \"orthogonal\", initializer_param = {\"gain\":np.sqrt(2), \"last_gain\": 0.1}", "= Conv.forward(self, x, other_data) # for exploration, and similar to", "np import torch.nn.functional as F from torch.autograd import Variable from", "list nonlinear = F.tanh, usebn = False, outactive = F.softmax,", "that the std is not too low. logstd = torch.clamp(self.logstd,", "nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, ) self.logstd = nn.Parameter(torch.log(sigma", "dim n_actions, # output dim n_hiddens = [10], # hidden", "\"last_gain\": 0.1} ): self.n_actions = n_actions super(FCPG_Softmax, self).__init__( n_inputfeats, #", "strides, fcs, nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, ) def", "def forward(self, x, other_data=None): x = Conv.forward(self, x, other_data) #", "[8, 4, 3], channels = [8, 16, 16], strides =", "n_actions super(FCPG_Gaussian, self).__init__( n_inputfeats, # input dim n_actions, # output", "<reponame>HTRPOCODES/HTRPO-v2<gh_stars>1-10 import torch import numpy as np import torch.nn.functional as", "input dim n_actions, # output dim n_hiddens, # hidden unit", "other_data=None): x = MLP.forward(self, x, other_data) # for exploration, and", "torch import nn class FCPG_Gaussian(MLP): def __init__(self, n_inputfeats, n_actions, sigma,", "super(FCPG_Gaussian, self).__init__( n_inputfeats, # input dim n_actions, # output dim", "super(FCVALUE, self).__init__( n_inputfeats, 1, n_hiddens, nonlinear, usebn, outactive, outscaler, 
initializer,", "= F.relu, usebn = False, outactive = F.softmax, outscaler =", "outactive = None, outscaler = None, initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2), \"last_gain\": 0.1}", "hidden unit number list nonlinear = F.relu, usebn = False,", "outscaler = None, initializer = \"orthogonal\", initializer_param = {\"gain\":np.sqrt(2), \"last_gain\":", "= \"orthogonal\", initializer_param = {\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions =", "ConvPG_Softmax(Conv): def __init__(self, n_inputfeats, # input dim n_actions, # output", "= {\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions = n_actions super(FCPG_Softmax, self).__init__(", "min = np.log(0.1)) return x, logstd.expand_as(x), torch.exp(logstd).expand_as(x) def cuda(self, device", "): self.n_actions = n_actions super(ConvPG_Softmax, self).__init__( n_inputfeats, # input dim", "dim=-1, keepdim=True).detach() return x # TODO: support multi-layer value function", "= n_actions super(FCPG_Softmax, self).__init__( n_inputfeats, # input dim n_actions, #", "nonlinear = F.tanh, usebn = False, outactive = None, outscaler", "channels = [8, 16, 16], strides = [4, 2, 2],", "/ self.n_actions x = x / torch.sum(x, dim = -1,", "cuda(self, device = None): self.logstd.cuda() return self._apply(lambda t: t.cuda(device)) class", "other_data = None): x = MLP.forward(self, x, other_data) # for", "in which action is concat before the final layer class", "# output dim k_sizes = [8, 4, 3], channels =", "FCVALUE(MLP): def __init__(self, n_inputfeats, n_hiddens = [30], nonlinear = F.tanh,", "dim n_actions, # output dim k_sizes = [8, 4, 3],", "self.n_actions = n_actions super(FCPG_Softmax, self).__init__( n_inputfeats, # input dim n_actions,", "= [10], # hidden unit number list nonlinear = F.tanh,", "= {\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions = n_actions super(FCPG_Gaussian, self).__init__(", "exploration, and similar to e-greedy x = x + 0.01", "usebn, outactive, outscaler, 
initializer, initializer_param=initializer_param, ) def forward(self, x, other_data=None):", "= [8, 4, 3], channels = [8, 16, 16], strides", "import nn class FCPG_Gaussian(MLP): def __init__(self, n_inputfeats, n_actions, sigma, n_hiddens", "n_actions super(ConvPG_Softmax, self).__init__( n_inputfeats, # input dim n_actions, # output", "the final layer class FCVALUE(MLP): def __init__(self, n_inputfeats, n_hiddens =", "nonlinear = F.tanh, usebn = False, outactive = F.softmax, outscaler", "__init__(self, n_inputfeats, # input dim n_actions, # output dim n_hiddens", "\"last_gain\": 0.1} ): self.n_actions = n_actions super(FCPG_Gaussian, self).__init__( n_inputfeats, #", "initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): super(FCVALUE, self).__init__( n_inputfeats, 1, n_hiddens,", "outactive = F.softmax, outscaler = None, initializer=\"xavier\", initializer_param={} ): self.n_actions", "std is not too low. logstd = torch.clamp(self.logstd, min =", "{\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions = n_actions super(FCPG_Gaussian, self).__init__( n_inputfeats,", "outactive, outscaler, initializer, initializer_param=initializer_param, ) def forward(self, x, other_data=None): x", "FCPG_Softmax(MLP): def __init__(self, n_inputfeats, # input dim n_actions, # output", "F.softmax, outscaler = None, initializer = \"orthogonal\", initializer_param = {\"gain\":np.sqrt(2),", "MLP.forward(self, x, other_data) # for exploration, and similar to e-greedy", "nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, ) def forward(self, x,", "nn class FCPG_Gaussian(MLP): def __init__(self, n_inputfeats, n_actions, sigma, n_hiddens =", "self).__init__( n_inputfeats, # input dim n_actions, # output dim n_hiddens,", "= None): x = MLP.forward(self, x, other_data) # for exploration,", "# input dim n_actions, # output dim k_sizes = [8,", "2, 2], fcs = [32, 32, 32], # hidden unit", "x = MLP.forward(self, x, 
other_data) # for exploration, we need", "= np.log(0.1)) return x, logstd.expand_as(x), torch.exp(logstd).expand_as(x) def cuda(self, device =", "): super(FCVALUE, self).__init__( n_inputfeats, 1, n_hiddens, nonlinear, usebn, outactive, outscaler,", "k_sizes = [8, 4, 3], channels = [8, 16, 16],", "{\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions = n_actions super(FCPG_Softmax, self).__init__( n_inputfeats,", "other_data) # for exploration, and similar to e-greedy x =", "def __init__(self, n_inputfeats, n_hiddens = [30], nonlinear = F.tanh, usebn", "= [30], nonlinear = F.tanh, usebn = False, outactive =", "output dim n_hiddens = [10], # hidden unit number list", "self.n_actions x = x / torch.sum(x, dim=-1, keepdim=True).detach() return x", "= False, outactive = None, outscaler = None, initializer =", "\"orthogonal\", initializer_param = {\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions = n_actions", "32], # hidden unit number list nonlinear = F.relu, usebn", "= x + 0.01 / self.n_actions x = x /", "False, outactive = None, outscaler = None, initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2), \"last_gain\":", "dim = -1, keepdim=True).detach() return x class ConvPG_Softmax(Conv): def __init__(self,", "# hidden unit number list nonlinear = F.tanh, usebn =", "x # TODO: support multi-layer value function in which action", "TODO: support multi-layer value function in which action is concat", "): self.n_actions = n_actions super(FCPG_Softmax, self).__init__( n_inputfeats, # input dim", "# output dim k_sizes, channels, strides, fcs, nonlinear, usebn, outactive,", "x, other_data=None): x = Conv.forward(self, x, other_data) # for exploration,", "0.1} ): self.n_actions = n_actions super(FCPG_Gaussian, self).__init__( n_inputfeats, # input", "self.logstd.cuda() return self._apply(lambda t: t.cuda(device)) class FCPG_Softmax(MLP): def __init__(self, n_inputfeats,", "x, other_data=None): x = MLP.forward(self, x, other_data) # for exploration,", 
"the std is not too low. logstd = torch.clamp(self.logstd, min", "= MLP.forward(self, x, other_data) # for exploration, we need to", "x class ConvPG_Softmax(Conv): def __init__(self, n_inputfeats, # input dim n_actions,", "e-greedy x = x + 0.01 / self.n_actions x =", "# input dim n_actions, # output dim k_sizes, channels, strides,", "usebn = False, outactive = None, outscaler = None, initializer=\"orthogonal\",", "__init__(self, n_inputfeats, # input dim n_actions, # output dim k_sizes", "def forward(self, x, other_data=None): x = MLP.forward(self, x, other_data) #", "from basenets.MLP import MLP from basenets.Conv import Conv from torch", "n_hiddens, # hidden unit number list nonlinear, usebn, outactive, outscaler,", "= F.tanh, usebn = False, outactive = F.softmax, outscaler =", "3], channels = [8, 16, 16], strides = [4, 2,", "initializer, initializer_param=initializer_param, ) def forward(self, x, other_data=None): x = MLP.forward(self,", "sigma, n_hiddens = [30], nonlinear = F.tanh, usebn = False,", "# input dim n_actions, # output dim n_hiddens, # hidden", "MLP.forward(self, x, other_data) # for exploration, we need to make", ") def forward(self, x, other_data=None): x = Conv.forward(self, x, other_data)", "# TODO: support multi-layer value function in which action is", "= -1, keepdim=True).detach() return x class ConvPG_Softmax(Conv): def __init__(self, n_inputfeats,", "= MLP.forward(self, x, other_data) # for exploration, and similar to", "False, outactive = F.softmax, outscaler = None, initializer = \"orthogonal\",", "np.log(0.1)) return x, logstd.expand_as(x), torch.exp(logstd).expand_as(x) def cuda(self, device = None):", "None): x = MLP.forward(self, x, other_data) # for exploration, we", "[4, 2, 2], fcs = [32, 32, 32], # hidden", "import Conv from torch import nn class FCPG_Gaussian(MLP): def __init__(self,", "None, outscaler = None, initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): super(FCVALUE,", "numpy as np 
import torch.nn.functional as F from torch.autograd import", "return x, logstd.expand_as(x), torch.exp(logstd).expand_as(x) def cuda(self, device = None): self.logstd.cuda()", "self).__init__( n_inputfeats, # input dim n_actions, # output dim k_sizes,", "n_hiddens = [30], nonlinear = F.tanh, usebn = False, outactive", "0.1} ): self.n_actions = n_actions super(FCPG_Softmax, self).__init__( n_inputfeats, # input", "initializer_param={\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): super(FCVALUE, self).__init__( n_inputfeats, 1, n_hiddens, nonlinear,", "is not too low. logstd = torch.clamp(self.logstd, min = np.log(0.1))", "__init__(self, n_inputfeats, n_hiddens = [30], nonlinear = F.tanh, usebn =", "n_actions, # output dim k_sizes = [8, 4, 3], channels", "outactive, outscaler, initializer, initializer_param=initializer_param, ) self.logstd = nn.Parameter(torch.log(sigma * torch.ones(n_actions)", "n_inputfeats, # input dim n_actions, # output dim n_hiddens =", "= F.softmax, outscaler = None, initializer=\"xavier\", initializer_param={} ): self.n_actions =", "-1, keepdim=True).detach() return x class ConvPG_Softmax(Conv): def __init__(self, n_inputfeats, #", "0.1} ): super(FCVALUE, self).__init__( n_inputfeats, 1, n_hiddens, nonlinear, usebn, outactive,", "we need to make sure that the std is not", "low. 
logstd = torch.clamp(self.logstd, min = np.log(0.1)) return x, logstd.expand_as(x),", "# output dim n_hiddens = [10], # hidden unit number", "input dim n_actions, # output dim k_sizes = [8, 4,", "for exploration, we need to make sure that the std", "+ 0.01 / self.n_actions x = x / torch.sum(x, dim=-1,", "outscaler = None, initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): super(FCVALUE, self).__init__(", "list nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, ) def forward(self,", "n_actions super(FCPG_Softmax, self).__init__( n_inputfeats, # input dim n_actions, # output", "basenets.Conv import Conv from torch import nn class FCPG_Gaussian(MLP): def", "before the final layer class FCVALUE(MLP): def __init__(self, n_inputfeats, n_hiddens", "): self.n_actions = n_actions super(FCPG_Gaussian, self).__init__( n_inputfeats, # input dim", "device = None): self.logstd.cuda() return self._apply(lambda t: t.cuda(device)) class FCPG_Softmax(MLP):", "and similar to e-greedy x = x + 0.01 /", "= F.tanh, usebn = False, outactive = None, outscaler =", "logstd.expand_as(x), torch.exp(logstd).expand_as(x) def cuda(self, device = None): self.logstd.cuda() return self._apply(lambda", "/ torch.sum(x, dim = -1, keepdim=True).detach() return x class ConvPG_Softmax(Conv):", "[30], nonlinear = F.tanh, usebn = False, outactive = None,", "n_hiddens = [10], # hidden unit number list nonlinear =", "unit number list nonlinear = F.tanh, usebn = False, outactive", "to e-greedy x = x + 0.01 / self.n_actions x", "initializer, initializer_param=initializer_param, ) self.logstd = nn.Parameter(torch.log(sigma * torch.ones(n_actions) + 1e-8))", "# output dim n_hiddens, # hidden unit number list nonlinear,", "unit number list nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, )", "torch.nn.functional as F from torch.autograd import Variable from basenets.MLP import", "import numpy as 
np import torch.nn.functional as F from torch.autograd", "from basenets.Conv import Conv from torch import nn class FCPG_Gaussian(MLP):", "Variable from basenets.MLP import MLP from basenets.Conv import Conv from", "/ self.n_actions x = x / torch.sum(x, dim=-1, keepdim=True).detach() return", "def forward(self,x, other_data = None): x = MLP.forward(self, x, other_data)", "= None, outscaler = None, initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2), \"last_gain\": 0.1} ):", "# for exploration, we need to make sure that the", "= [4, 2, 2], fcs = [32, 32, 32], #", "initializer=\"xavier\", initializer_param={} ): self.n_actions = n_actions super(ConvPG_Softmax, self).__init__( n_inputfeats, #", "return x class ConvPG_Softmax(Conv): def __init__(self, n_inputfeats, # input dim", "self.logstd = nn.Parameter(torch.log(sigma * torch.ones(n_actions) + 1e-8)) def forward(self,x, other_data", "1e-8)) def forward(self,x, other_data = None): x = MLP.forward(self, x,", "False, outactive = F.softmax, outscaler = None, initializer=\"xavier\", initializer_param={} ):", "number list nonlinear = F.tanh, usebn = False, outactive =", "self).__init__( n_inputfeats, 1, n_hiddens, nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param,", "= None, initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): super(FCVALUE, self).__init__( n_inputfeats,", "x, other_data) # for exploration, and similar to e-greedy x", "need to make sure that the std is not too", "x, other_data) # for exploration, we need to make sure", "n_inputfeats, # input dim n_actions, # output dim n_hiddens, #", "as np import torch.nn.functional as F from torch.autograd import Variable", "import Variable from basenets.MLP import MLP from basenets.Conv import Conv", "initializer = \"orthogonal\", initializer_param = {\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions", "fcs, nonlinear, usebn, outactive, outscaler, initializer, 
initializer_param=initializer_param, ) def forward(self,", "nn.Parameter(torch.log(sigma * torch.ones(n_actions) + 1e-8)) def forward(self,x, other_data = None):", "n_inputfeats, # input dim n_actions, # output dim k_sizes =", "None, initializer = \"orthogonal\", initializer_param = {\"gain\":np.sqrt(2), \"last_gain\": 0.1} ):", "channels, strides, fcs, nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, )", "make sure that the std is not too low. logstd", "None): self.logstd.cuda() return self._apply(lambda t: t.cuda(device)) class FCPG_Softmax(MLP): def __init__(self,", "self.n_actions = n_actions super(ConvPG_Softmax, self).__init__( n_inputfeats, # input dim n_actions,", "# for exploration, and similar to e-greedy x = x", "to make sure that the std is not too low.", "return self._apply(lambda t: t.cuda(device)) class FCPG_Softmax(MLP): def __init__(self, n_inputfeats, #", "torch.sum(x, dim = -1, keepdim=True).detach() return x class ConvPG_Softmax(Conv): def", "= torch.clamp(self.logstd, min = np.log(0.1)) return x, logstd.expand_as(x), torch.exp(logstd).expand_as(x) def", "/ torch.sum(x, dim=-1, keepdim=True).detach() return x # TODO: support multi-layer", "= False, outactive = None, outscaler = None, initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2),", "# hidden unit number list nonlinear, usebn, outactive, outscaler, initializer,", "None, outscaler = None, initializer = \"orthogonal\", initializer_param = {\"gain\":np.sqrt(2),", "basenets.MLP import MLP from basenets.Conv import Conv from torch import", "usebn = False, outactive = F.softmax, outscaler = None, initializer", "for exploration, and similar to e-greedy x = x +", "= nn.Parameter(torch.log(sigma * torch.ones(n_actions) + 1e-8)) def forward(self,x, other_data =", "4, 3], channels = [8, 16, 16], strides = [4,", "similar to e-greedy x = x + 0.01 / self.n_actions", "initializer_param = {\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): self.n_actions = 
n_actions super(FCPG_Softmax,", "= x / torch.sum(x, dim=-1, keepdim=True).detach() return x # TODO:", "number list nonlinear = F.relu, usebn = False, outactive =", "class ConvPG_Softmax(Conv): def __init__(self, n_inputfeats, # input dim n_actions, #", "[10], # hidden unit number list nonlinear = F.tanh, usebn", "[8, 16, 16], strides = [4, 2, 2], fcs =", "__init__(self, n_inputfeats, n_actions, sigma, n_hiddens = [30], nonlinear = F.tanh,", "torch.ones(n_actions) + 1e-8)) def forward(self,x, other_data = None): x =", "strides = [4, 2, 2], fcs = [32, 32, 32],", "torch.clamp(self.logstd, min = np.log(0.1)) return x, logstd.expand_as(x), torch.exp(logstd).expand_as(x) def cuda(self,", "number list nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param, ) self.logstd", "from torch.autograd import Variable from basenets.MLP import MLP from basenets.Conv", "def cuda(self, device = None): self.logstd.cuda() return self._apply(lambda t: t.cuda(device))", "initializer, initializer_param=initializer_param, ) def forward(self, x, other_data=None): x = Conv.forward(self,", "= False, outactive = F.softmax, outscaler = None, initializer =", "initializer_param=initializer_param, ) def forward(self, x, other_data=None): x = Conv.forward(self, x,", "hidden unit number list nonlinear = F.tanh, usebn = False,", "None, initializer=\"orthogonal\", initializer_param={\"gain\":np.sqrt(2), \"last_gain\": 0.1} ): super(FCVALUE, self).__init__( n_inputfeats, 1,", "x = MLP.forward(self, x, other_data) # for exploration, and similar", "from torch import nn class FCPG_Gaussian(MLP): def __init__(self, n_inputfeats, n_actions,", "\"last_gain\": 0.1} ): super(FCVALUE, self).__init__( n_inputfeats, 1, n_hiddens, nonlinear, usebn,", "import torch import numpy as np import torch.nn.functional as F", ") def forward(self, x, other_data=None): x = MLP.forward(self, x, other_data)", "output dim k_sizes, channels, strides, fcs, nonlinear, usebn, outactive, outscaler,", 
"keepdim=True).detach() return x class ConvPG_Softmax(Conv): def __init__(self, n_inputfeats, # input", "k_sizes, channels, strides, fcs, nonlinear, usebn, outactive, outscaler, initializer, initializer_param=initializer_param,", "x + 0.01 / self.n_actions x = x / torch.sum(x,", "dim n_actions, # output dim k_sizes, channels, strides, fcs, nonlinear,", "MLP from basenets.Conv import Conv from torch import nn class", "class FCPG_Gaussian(MLP): def __init__(self, n_inputfeats, n_actions, sigma, n_hiddens = [30],", "input dim n_actions, # output dim n_hiddens = [10], #", "= None, initializer=\"xavier\", initializer_param={} ): self.n_actions = n_actions super(ConvPG_Softmax, self).__init__(", "which action is concat before the final layer class FCVALUE(MLP):", "t: t.cuda(device)) class FCPG_Softmax(MLP): def __init__(self, n_inputfeats, # input dim", "unit number list nonlinear = F.relu, usebn = False, outactive", "t.cuda(device)) class FCPG_Softmax(MLP): def __init__(self, n_inputfeats, # input dim n_actions,", "import MLP from basenets.Conv import Conv from torch import nn", "= F.softmax, outscaler = None, initializer = \"orthogonal\", initializer_param =", "[32, 32, 32], # hidden unit number list nonlinear =", "value function in which action is concat before the final", "final layer class FCVALUE(MLP): def __init__(self, n_inputfeats, n_hiddens = [30],", "0.01 / self.n_actions x = x / torch.sum(x, dim=-1, keepdim=True).detach()", "self.n_actions = n_actions super(FCPG_Gaussian, self).__init__( n_inputfeats, # input dim n_actions,", "import torch.nn.functional as F from torch.autograd import Variable from basenets.MLP", "forward(self, x, other_data=None): x = Conv.forward(self, x, other_data) # for", "F.tanh, usebn = False, outactive = F.softmax, outscaler = None,", "None, initializer=\"xavier\", initializer_param={} ): self.n_actions = n_actions super(ConvPG_Softmax, self).__init__( n_inputfeats,", "super(FCPG_Softmax, self).__init__( n_inputfeats, # input dim 
n_actions, # output dim", "input dim n_actions, # output dim k_sizes, channels, strides, fcs,", "0.01 / self.n_actions x = x / torch.sum(x, dim =", "dim k_sizes = [8, 4, 3], channels = [8, 16,", "outscaler = None, initializer=\"xavier\", initializer_param={} ): self.n_actions = n_actions super(ConvPG_Softmax,", "class FCVALUE(MLP): def __init__(self, n_inputfeats, n_hiddens = [30], nonlinear =", "torch import numpy as np import torch.nn.functional as F from", "concat before the final layer class FCVALUE(MLP): def __init__(self, n_inputfeats,", "sure that the std is not too low. logstd =", "nonlinear = F.relu, usebn = False, outactive = F.softmax, outscaler", "torch.sum(x, dim=-1, keepdim=True).detach() return x # TODO: support multi-layer value", "F.relu, usebn = False, outactive = F.softmax, outscaler = None,", "dim n_hiddens, # hidden unit number list nonlinear, usebn, outactive,", "super(ConvPG_Softmax, self).__init__( n_inputfeats, # input dim n_actions, # output dim", "return x # TODO: support multi-layer value function in which", "initializer_param=initializer_param, ) def forward(self, x, other_data=None): x = MLP.forward(self, x,", "x / torch.sum(x, dim = -1, keepdim=True).detach() return x class", "n_inputfeats, n_actions, sigma, n_hiddens = [30], nonlinear = F.tanh, usebn", "x = x / torch.sum(x, dim = -1, keepdim=True).detach() return" ]
[ "benchmark from tensorflow.python.platform import test # pylint: disable=protected-access add_sparse_to_tensors_map =", "ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=handle_concat) with", "but rank of SparseTensor\\[1\\] is: 4\"): self.evaluate(sp_roundtrip) def testTakeManyFailsWrongInputOp(self): with", "sparse_handles=st_handles) st_roundtrip_op = st_roundtrip.values.op st_serialized = sparse_ops.serialize_many_sparse(st) st_deserialized = sparse_ops.deserialize_many_sparse(", "self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0]) self.assertAllEqual(combined_values[:6], sp_input0[1]) self.assertAllEqual(combined_values[6:], sp_input1[1]) self.assertAllEqual(combined_shape, [2, 5,", "2.0 (the \"License\"); # you may not use this file", "1:], input0_val[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch 1", "of SparseTensor\\[1\\] is: 4\"): self.evaluate(sp_roundtrip) def testTakeManyFailsWrongInputOp(self): with self.session(use_gpu=False) as", "self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_1x1x1() handle = add_sparse_to_tensors_map(sp_input)", "[handles, roundtrip], feed_dict={ sparse_tensor.indices: indices_value, sparse_tensor.values: values_value, sparse_tensor.dense_shape: shape_value })", "from tensorflow.python.ops import sparse_ops from tensorflow.python.ops import variables from tensorflow.python.platform", "dtype=np.int64) values_value = np.array([b\"a\", b\"b\", b\"c\"]) shape_value = np.array([4, 5],", "import session from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops", "import array_ops from tensorflow.python.ops import sparse_ops from tensorflow.python.ops import variables", "sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string) handles = 
add_many_sparse_to_tensors_map(sparse_tensor) roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handles.op,", "6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], input1_val[0]) self.assertAllEqual(combined_values[:6], input0_val[1]) self.assertAllEqual(combined_values[6:],", "= handle_value + 10 sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle])", "np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape) self.run_op_benchmark( sess, st_roundtrip_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark( sess, st_deserialized_op,", "values_value) self.assertAllEqual(roundtrip_value.dense_shape, shape_value) def testDeserializeFailsInconsistentRank(self): with self.session(use_gpu=False) as sess: sp_input", "as np from tensorflow.python.client import session from tensorflow.python.framework import dtypes", "4], [3, 2], [3, 3]]).astype(np.int64) val = np.array([0, 10, 13,", "4 because shape_value == [4, 5] indices_value = np.array([[0, 0],", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self.assertAllEqual(combined_indices[6:, 1:], input1_val[0]) self.assertAllEqual(combined_values[:6], input0_val[1]) self.assertAllEqual(combined_values[6:], input1_val[1]) self.assertAllEqual(combined_shape, [2, 5,", "dtype=np.int64) indices_value = np.arange(num_elements, dtype=np.int64) indices = np.asarray( sorted(zip(indices_batch, indices_value)),", "import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor", "def benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127) num_elements = 10000 batch_size = 64 indices_batch", "6]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_3x4(self, permutation): ind =", "4]).astype(np.int64) return 
sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_1x1x1(self): ind = np.array([[0,", "np.array([b\"a\", b\"b\", b\"c\"]) shape_value = np.array([4, 5], dtype=np.int64) sparse_tensor =", "add_sparse_to_tensors_map(sp_input1, shared_name=\"a\") self.assertEqual(handle0.get_shape(), ()) handles_concat = array_ops.stack([handle0, handle1]) sp_out =", "sparse_map_op=handles.op, sparse_handles=handles) handles_value, roundtrip_value = sess.run( [handles, roundtrip], feed_dict={ sparse_tensor.indices:", "roundtrip_value = sess.run( [handles, roundtrip], feed_dict={ sparse_tensor.indices: indices_value, sparse_tensor.values: values_value,", "roundtrip], feed_dict={ sparse_tensor.indices: indices_value, sparse_tensor.values: values_value, sparse_tensor.dense_shape: shape_value }) self.assertEqual(handles_value.shape,", "with ops.device(\"/cpu:0\"): indices = variables.Variable(indices) values = variables.Variable(values) shape =", "_SparseTensorValue_5x6(self, permutation): ind = np.array([[0, 0], [1, 0], [1, 3],", "pylint: enable=protected-access class SparseTensorsMapTest(test.TestCase): def _SparseTensorPlaceholder(self, dtype=None): if dtype is", "sparse_map_op=handle.op, sparse_handles=sparse_handles) combined_indices, combined_values, combined_shape = self.evaluate( sp_roundtrip) self.assertAllEqual(combined_indices[:6, 0],", "np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices) np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape) self.run_op_benchmark( sess, st_roundtrip_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark(", "import sparse_tensor as sparse_tensor_lib from tensorflow.python.ops import array_ops from tensorflow.python.ops", "ind = np.array([[0, 0], [1, 0], [1, 2], [1, 3],", "handle0 = add_sparse_to_tensors_map(sp_input0, shared_name=\"a\") handle1 = add_sparse_to_tensors_map(sp_input1, shared_name=\"a\") 
self.assertEqual(handle0.get_shape(), ())", "use this file except in compliance with the License. #", "self._SparseTensorValue_3x4(np.arange(6)) handle0 = add_sparse_to_tensors_map(sp_input0, shared_name=\"a\") handle1 = add_sparse_to_tensors_map(sp_input1, shared_name=\"a\") self.assertEqual(handle0.get_shape(),", "# minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0]) self.assertAllEqual(combined_indices[6:, 0], [1] *", "class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127) num_elements = 10000 batch_size =", "take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle]) with self.assertRaisesOpError(r\"Unable to find SparseTensor: 10\"):", "test # pylint: disable=protected-access add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map", "= array_ops.stack([handle0, handle1]) sp_out = take_many_sparse_from_tensors_map( sparse_map_op=handle0.op, sparse_handles=handles_concat) combined_indices, combined_values,", "roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handles.op, sparse_handles=handles) handles_value, roundtrip_value = sess.run( [handles,", "to \" r\"SparseTensor\\[1\\] was: 3 but rank of SparseTensor\\[1\\] is:", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "sparse_handles=handles) handles_value, roundtrip_value = sess.run( [handles, roundtrip], feed_dict={ sparse_tensor.indices: indices_value,", "self.session(use_gpu=False) as sess: # N == 4 because shape_value ==", "License. 
# You may obtain a copy of the License", "np.random.randint( batch_size, size=num_elements, dtype=np.int64) indices_value = np.arange(num_elements, dtype=np.int64) indices =", "()) handles_concat = array_ops.stack([handle0, handle1]) sp_out = take_many_sparse_from_tensors_map( sparse_map_op=handle0.op, sparse_handles=handles_concat)", "sess, st_roundtrip_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark( sess, st_deserialized_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_serialization\") if", "\" r\"SparseTensor\\[1\\] was: 3 but rank of SparseTensor\\[1\\] is: 4\"):", "The TensorFlow Authors. All Rights Reserved. # # Licensed under", "10 sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle]) with self.assertRaisesOpError(r\"Unable to", "handle0_value = sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val})", "add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map = ( sparse_ops._take_many_sparse_from_tensors_map) # pylint: enable=protected-access", "= add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle,", "under the License is distributed on an \"AS IS\" BASIS,", "self.assertRaisesOpError(r\"Unable to find SparseTensor: 10\"): self.evaluate(sp_roundtrip) class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def benchmarkVeryLarge2DFloatSparseTensor(self):", "tensorflow.python.ops import array_ops from tensorflow.python.ops import sparse_ops from tensorflow.python.ops import", "License for the specific language governing permissions and # limitations", "_SparseTensorValue_3x4(self, permutation): ind = np.array([[0, 0], [1, 0], [1, 2],", "# minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], input0_val[0]) 
self.assertAllEqual(combined_indices[6:, 0], [1] *", "input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_1x1x1() handle = add_sparse_to_tensors_map(sp_input) handle0_value", "# minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], input1_val[0]) self.assertAllEqual(combined_values[:6], input0_val[1]) self.assertAllEqual(combined_values[6:], input1_val[1])", "return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_1x1x1(self): ind = np.array([[0, 0,", "dtype is None: dtype = dtypes.int32 return sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype),", "Reserved. # # Licensed under the Apache License, Version 2.0", "= self.evaluate( sp_roundtrip) self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # minibatch", "with self.session(use_gpu=False) as sess: input_val = self._SparseTensorValue_5x6(np.arange(6)) handle = add_sparse_to_tensors_map(input_val)", "ops.device(\"/cpu:0\"): indices = variables.Variable(indices) values = variables.Variable(values) shape = variables.Variable(shape)", "self.evaluate(sp_roundtrip) class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127) num_elements = 10000 batch_size", "governing permissions and # limitations under the License. # ==============================================================================", "permutation): ind = np.array([[0, 0], [1, 0], [1, 3], [1,", "from __future__ import division from __future__ import print_function import numpy", "TensorFlow Authors. All Rights Reserved. 
# # Licensed under the", "handle1]) sp_out = take_many_sparse_from_tensors_map( sparse_map_op=handle0.op, sparse_handles=handles_concat) combined_indices, combined_values, combined_shape =", "32, 33]).astype(np.int32) ind = ind[permutation] val = val[permutation] shape =", "as sess: sp_input = self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val =", "= self._SparseTensorValue_3x4(np.arange(6)) handle = add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle, feed_dict={sp_input: input0_val})", "import division from __future__ import print_function import numpy as np", "SparseTensors: rank prior to \" r\"SparseTensor\\[1\\] was: 3 but rank", "from __future__ import absolute_import from __future__ import division from __future__", "tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import", "= self._SparseTensorValue_3x4(np.arange(6)) handle0 = add_sparse_to_tensors_map(sp_input0, shared_name=\"a\") handle1 = add_sparse_to_tensors_map(sp_input1, shared_name=\"a\")", "in compliance with the License. 
# You may obtain a", "handles_concat = array_ops.stack([handle0, handle1]) sp_out = take_many_sparse_from_tensors_map( sparse_map_op=handle0.op, sparse_handles=handles_concat) combined_indices,", "software # distributed under the License is distributed on an", "val = val[permutation] shape = np.array([5, 6]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val,", "= sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) handle_concat", "np.array([[0, 0], [0, 1], [2, 0]], dtype=np.int64) values_value = np.array([b\"a\",", "array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64)) def _SparseTensorValue_5x6(self, permutation): ind = np.array([[0, 0],", "self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices) np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape) self.run_op_benchmark( sess, st_roundtrip_op,", "= ( sparse_ops._take_many_sparse_from_tensors_map) # pylint: enable=protected-access class SparseTensorsMapTest(test.TestCase): def _SparseTensorPlaceholder(self,", "np.array([[0, 0, 0]]).astype(np.int64) val = np.array([0]).astype(np.int32) shape = np.array([3, 4,", "input0_val[1]) self.assertAllEqual(combined_values[6:], input1_val[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def testAddManyTakeManyRoundTrip(self): with", "# minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0]) self.assertAllEqual(combined_values[:6], sp_input0[1]) self.assertAllEqual(combined_values[6:], sp_input1[1])", "[1, 3], [2, 2], [2, 3]]).astype(np.int64) val = np.array([0, 10,", "= np.arange(num_elements, dtype=np.int64) indices = np.asarray( sorted(zip(indices_batch, indices_value)), dtype=np.int64) values", "= sess.run(handle, feed_dict={sp_input: 
input1_val}) handle_concat = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64)", "st_serialized, dtype=values.dtype) st_deserialized_op = st_deserialized.values.op variables.global_variables_initializer().run() st_roundtrip_values = self.evaluate(st_roundtrip) st_deserialized_values", "def _SparseTensorValue_1x1x1(self): ind = np.array([[0, 0, 0]]).astype(np.int64) val = np.array([0]).astype(np.int32)", "[1] * 6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], input1_val[0]) self.assertAllEqual(combined_values[:6],", "dtype=np.int64) with session.Session(config=benchmark.benchmark_config()) as sess: with ops.device(\"/cpu:0\"): indices = variables.Variable(indices)", "ind = np.array([[0, 0, 0]]).astype(np.int64) val = np.array([0]).astype(np.int32) shape =", "enable=protected-access class SparseTensorsMapTest(test.TestCase): def _SparseTensorPlaceholder(self, dtype=None): if dtype is None:", "input1_val[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def testAddManyTakeManyRoundTrip(self): with self.session(use_gpu=False) as", "find SparseTensor: 10\"): self.evaluate(sp_roundtrip) class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127) num_elements", "minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0]) self.assertAllEqual(combined_values[:6], sp_input0[1]) self.assertAllEqual(combined_values[6:], sp_input1[1]) self.assertAllEqual(combined_shape,", "np.array([0]).astype(np.int32) shape = np.array([3, 4, 5]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape)", "dtype=np.int64) values = [\"feature_value_for_embedding_lookup\"] * num_elements shape = np.asarray([batch_size, num_elements],", "minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], input0_val[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6)", "== 4 because shape_value == [4, 5] indices_value = np.array([[0,", 
"self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_3x4(np.arange(6)) handle = add_sparse_to_tensors_map(sp_input)", "with self.assertRaisesOpError(r\"Unable to find SparseTensor: 10\"): self.evaluate(sp_roundtrip) class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def", "= st_deserialized.values.op variables.global_variables_initializer().run() st_roundtrip_values = self.evaluate(st_roundtrip) st_deserialized_values = self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values,", "self.assertAllEqual(combined_shape, [2, 5, 6]) def testAddManyTakeManyRoundTrip(self): with self.session(use_gpu=False) as sess:", "np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices) np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape) self.run_op_benchmark( sess, st_roundtrip_op, min_iters=2000,", "self.assertAllEqual(combined_indices[:6, 1:], input0_val[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch", "division from __future__ import print_function import numpy as np from", "1:], sp_input0[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch 1", "st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices) np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape) self.run_op_benchmark( sess, st_roundtrip_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\")", "ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=sparse_handles) combined_indices,", "SparseTensorsMapTest(test.TestCase): def _SparseTensorPlaceholder(self, dtype=None): if dtype is None: dtype =", 
"feed_dict={ sparse_tensor.indices: indices_value, sparse_tensor.values: values_value, sparse_tensor.dense_shape: shape_value }) self.assertEqual(handles_value.shape, (4,))", "return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_3x4(self, permutation): ind = np.array([[0,", "as sess: # N == 4 because shape_value == [4,", "np from tensorflow.python.client import session from tensorflow.python.framework import dtypes from", "ops from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib from tensorflow.python.ops import", "SparseTensorsMap.\"\"\" from __future__ import absolute_import from __future__ import division from", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "def testFeedAddTakeMany(self): with self.session(use_gpu=False) as sess: sp_input = self._SparseTensorPlaceholder() input0_val", "limitations under the License. # ============================================================================== \"\"\"Tests for SparseTensorsMap.\"\"\" from", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "= take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op, sparse_handles=st_handles) st_roundtrip_op = st_roundtrip.values.op st_serialized = sparse_ops.serialize_many_sparse(st)", "= np.asarray( sorted(zip(indices_batch, indices_value)), dtype=np.int64) values = [\"feature_value_for_embedding_lookup\"] * num_elements", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "sparse_tensor as sparse_tensor_lib from tensorflow.python.ops import array_ops from tensorflow.python.ops import", "to in writing, software # distributed under the License is", "numpy as np from tensorflow.python.client import session from tensorflow.python.framework import", "# See the License for the specific language governing permissions", "from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib", "values, shape) st_handles = add_many_sparse_to_tensors_map(st) st_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op, sparse_handles=st_handles)", "[3, 3]]).astype(np.int64) val = np.array([0, 10, 13, 14, 32, 33]).astype(np.int32)", "sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map = ( sparse_ops._take_many_sparse_from_tensors_map) # pylint: enable=protected-access class SparseTensorsMapTest(test.TestCase):", "language governing permissions and # limitations under the License. #", "or agreed to in writing, software # distributed under the", "6]) def testAddManyTakeManyRoundTrip(self): with self.session(use_gpu=False) as sess: # N ==", "def _SparseTensorValue_5x6(self, permutation): ind = np.array([[0, 0], [1, 0], [1,", "required by applicable law or agreed to in writing, software", "sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) handle_concat =", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "testAddManyTakeManyRoundTrip(self): with self.session(use_gpu=False) as sess: # N == 4 because", "array_ops from tensorflow.python.ops import sparse_ops from tensorflow.python.ops import variables from", "with the License. 
# You may obtain a copy of", "indices_batch = np.random.randint( batch_size, size=num_elements, dtype=np.int64) indices_value = np.arange(num_elements, dtype=np.int64)", "num_elements], dtype=np.int64) with session.Session(config=benchmark.benchmark_config()) as sess: with ops.device(\"/cpu:0\"): indices =", "under the License. # ============================================================================== \"\"\"Tests for SparseTensorsMap.\"\"\" from __future__", "tensorflow.python.ops import variables from tensorflow.python.platform import benchmark from tensorflow.python.platform import", "values_value = np.array([b\"a\", b\"b\", b\"c\"]) shape_value = np.array([4, 5], dtype=np.int64)", "input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) handle_concat = ops.convert_to_tensor( [handle0_value,", "= sparse_tensor_lib.SparseTensor(indices, values, shape) st_handles = add_many_sparse_to_tensors_map(st) st_roundtrip = take_many_sparse_from_tensors_map(", "handle_concat = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op,", "compliance with the License. # You may obtain a copy", "All Rights Reserved. 
# # Licensed under the Apache License,", "agreed to in writing, software # distributed under the License", "with self.assertRaisesOpError( r\"Inconsistent rank across SparseTensors: rank prior to \"", "= variables.Variable(indices) values = variables.Variable(values) shape = variables.Variable(shape) st =", "[2, 5, 6]) def testFeedAddTakeMany(self): with self.session(use_gpu=False) as sess: sp_input", "sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle]) with self.assertRaisesOpError(r\"Unable to find SparseTensor: 10\"): self.evaluate(sp_roundtrip)", "3], [1, 4], [3, 2], [3, 3]]).astype(np.int64) val = np.array([0,", "sparse_map_op=handle0.op, sparse_handles=handles_concat) combined_indices, combined_values, combined_shape = self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6, 0], [0]", "distributed under the License is distributed on an \"AS IS\"", "add_many_sparse_to_tensors_map(sparse_tensor) roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handles.op, sparse_handles=handles) handles_value, roundtrip_value = sess.run(", "= np.array([0, 10, 13, 14, 32, 33]).astype(np.int32) ind = ind[permutation]", "shape) def testAddTakeMany(self): with self.session(graph=ops.Graph(), use_gpu=False) as sess: sp_input0 =", "print_function import numpy as np from tensorflow.python.client import session from", "= variables.Variable(values) shape = variables.Variable(shape) st = sparse_tensor_lib.SparseTensor(indices, values, shape)", "shape) st_handles = add_many_sparse_to_tensors_map(st) st_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op, sparse_handles=st_handles) st_roundtrip_op", "0], [1, 3], [1, 4], [3, 2], [3, 3]]).astype(np.int64) val", "express or implied. 
# See the License for the specific", "1 self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0]) self.assertAllEqual(combined_values[:6], sp_input0[1]) self.assertAllEqual(combined_values[6:], sp_input1[1]) self.assertAllEqual(combined_shape, [2,", "except in compliance with the License. # You may obtain", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "feed_dict={sp_input: input1_val}) sparse_handles = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip =", "not use this file except in compliance with the License.", "= self._SparseTensorValue_5x6(np.arange(6)) sp_input1 = self._SparseTensorValue_3x4(np.arange(6)) handle0 = add_sparse_to_tensors_map(sp_input0, shared_name=\"a\") handle1", "was: 3 but rank of SparseTensor\\[1\\] is: 4\"): self.evaluate(sp_roundtrip) def", "0], [1, 0], [1, 3], [1, 4], [3, 2], [3,", "writing, software # distributed under the License is distributed on", "handle_value = self.evaluate(handle) bad_handle = handle_value + 10 sp_roundtrip =", "sp_input1[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def testFeedAddTakeMany(self): with self.session(use_gpu=False) as", "you may not use this file except in compliance with", "[handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=sparse_handles) combined_indices, combined_values,", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def testAddTakeMany(self): with self.session(graph=ops.Graph(), use_gpu=False) as", "shape_value == [4, 5] indices_value = np.array([[0, 0], [0, 1],", "rank across SparseTensors: rank prior to \" r\"SparseTensor\\[1\\] was: 3", "[1, 0], [1, 2], [1, 3], [2, 2], [2, 3]]).astype(np.int64)", "= np.array([3, 4, 5]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def testAddTakeMany(self):", "dtype=values.dtype) 
st_deserialized_op = st_deserialized.values.op variables.global_variables_initializer().run() st_roundtrip_values = self.evaluate(st_roundtrip) st_deserialized_values =", "tensorflow.python.client import session from tensorflow.python.framework import dtypes from tensorflow.python.framework import", "indices_value = np.arange(num_elements, dtype=np.int64) indices = np.asarray( sorted(zip(indices_batch, indices_value)), dtype=np.int64)", "CONDITIONS OF ANY KIND, either express or implied. # See", "from tensorflow.python.platform import benchmark from tensorflow.python.platform import test # pylint:", "take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=sparse_handles) combined_indices, combined_values, combined_shape = self.evaluate( sp_roundtrip) self.assertAllEqual(combined_indices[:6,", "import ops from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib from tensorflow.python.ops", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "feed_dict={sp_input: input1_val}) handle_concat = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip =", "prior to \" r\"SparseTensor\\[1\\] was: 3 but rank of SparseTensor\\[1\\]", "with session.Session(config=benchmark.benchmark_config()) as sess: with ops.device(\"/cpu:0\"): indices = variables.Variable(indices) values", "[4, 5] indices_value = np.array([[0, 0], [0, 1], [2, 0]],", "sparse_ops.deserialize_many_sparse( st_serialized, dtype=values.dtype) st_deserialized_op = st_deserialized.values.op variables.global_variables_initializer().run() st_roundtrip_values = self.evaluate(st_roundtrip)", "st_roundtrip_values = self.evaluate(st_roundtrip) st_deserialized_values = self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices)", "st_deserialized.values.op variables.global_variables_initializer().run() 
st_roundtrip_values = self.evaluate(st_roundtrip) st_deserialized_values = self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values)", "self.session(use_gpu=False) as sess: sp_input = self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val", "permissions and # limitations under the License. # ============================================================================== \"\"\"Tests", "tensorflow.python.platform import test # pylint: disable=protected-access add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map", "from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib from tensorflow.python.ops import array_ops", "val = np.array([0]).astype(np.int32) shape = np.array([3, 4, 5]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind,", "self.assertEqual(handle0.get_shape(), ()) handles_concat = array_ops.stack([handle0, handle1]) sp_out = take_many_sparse_from_tensors_map( sparse_map_op=handle0.op,", "[0, 1], [2, 0]], dtype=np.int64) values_value = np.array([b\"a\", b\"b\", b\"c\"])", "np.random.seed(127) num_elements = 10000 batch_size = 64 indices_batch = np.random.randint(", "self._SparseTensorValue_3x4(np.arange(6)) handle = add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value", "input1_val = self._SparseTensorValue_1x1x1() handle = add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle, feed_dict={sp_input:", "st_roundtrip_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark( sess, st_deserialized_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_serialization\") if __name__", "combined_values, combined_shape = self.evaluate( sp_roundtrip) self.assertAllEqual(combined_indices[:6, 0], [0] * 6)", "import numpy as np from tensorflow.python.client import session from 
tensorflow.python.framework", "sparse_handles = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op,", "with self.session(use_gpu=False) as sess: # N == 4 because shape_value", "from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework", "# pylint: disable=protected-access add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map", "= val[permutation] shape = np.array([5, 6]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape)", "is None: dtype = dtypes.int32 return sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64))", "1], [2, 0]], dtype=np.int64) values_value = np.array([b\"a\", b\"b\", b\"c\"]) shape_value", "self.session(graph=ops.Graph(), use_gpu=False) as sess: sp_input0 = self._SparseTensorValue_5x6(np.arange(6)) sp_input1 = self._SparseTensorValue_3x4(np.arange(6))", "self._SparseTensorValue_5x6(np.arange(6)) handle = add_sparse_to_tensors_map(input_val) handle_value = self.evaluate(handle) bad_handle = handle_value", "self.run_op_benchmark( sess, st_deserialized_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_serialization\") if __name__ == \"__main__\": test.main()", "sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) sparse_handles =", "tensorflow.python.ops import sparse_ops from tensorflow.python.ops import variables from tensorflow.python.platform import", "self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # minibatch 0 self.assertAllEqual(combined_indices[:6,", "self._SparseTensorValue_1x1x1() handle = add_sparse_to_tensors_map(sp_input) handle0_value = 
sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value", "sp_input0[1]) self.assertAllEqual(combined_values[6:], sp_input1[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def testFeedAddTakeMany(self): with", "OR CONDITIONS OF ANY KIND, either express or implied. #", "= self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_3x4(np.arange(6)) handle =", "dtype=None): if dtype is None: dtype = dtypes.int32 return sparse_tensor_lib.SparseTensor(", "handles = add_many_sparse_to_tensors_map(sparse_tensor) roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handles.op, sparse_handles=handles) handles_value, roundtrip_value", "dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=sparse_handles) combined_indices, combined_values, combined_shape =", "6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0]) self.assertAllEqual(combined_indices[6:, 0], [1]", "the License is distributed on an \"AS IS\" BASIS, #", "2015 The TensorFlow Authors. All Rights Reserved. 
# # Licensed", "import sparse_ops from tensorflow.python.ops import variables from tensorflow.python.platform import benchmark", "sparse_handles=[handle_value, bad_handle]) with self.assertRaisesOpError(r\"Unable to find SparseTensor: 10\"): self.evaluate(sp_roundtrip) class", "val[permutation] shape = np.array([5, 6]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def", "take_many_sparse_from_tensors_map = ( sparse_ops._take_many_sparse_from_tensors_map) # pylint: enable=protected-access class SparseTensorsMapTest(test.TestCase): def", "from tensorflow.python.ops import array_ops from tensorflow.python.ops import sparse_ops from tensorflow.python.ops", "= np.array([[0, 0], [1, 0], [1, 3], [1, 4], [3,", "shape_value = np.array([4, 5], dtype=np.int64) sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string) handles =", "permutation): ind = np.array([[0, 0], [1, 0], [1, 2], [1,", "ind[permutation] val = val[permutation] shape = np.array([5, 6]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind,", "0 self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6) #", "= self._SparseTensorValue_5x6(np.arange(6)) handle = add_sparse_to_tensors_map(input_val) handle_value = self.evaluate(handle) bad_handle =", "b\"b\", b\"c\"]) shape_value = np.array([4, 5], dtype=np.int64) sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string)", "batch_size = 64 indices_batch = np.random.randint( batch_size, size=num_elements, dtype=np.int64) indices_value", "law or agreed to in writing, software # distributed under", "indices_value)), dtype=np.int64) values = [\"feature_value_for_embedding_lookup\"] * num_elements shape = np.asarray([batch_size,", "[3, 2], [3, 3]]).astype(np.int64) val = np.array([0, 10, 13, 14,", "= 10000 batch_size = 64 indices_batch = np.random.randint( batch_size, size=num_elements,", "= 64 indices_batch = np.random.randint( batch_size, 
size=num_elements, dtype=np.int64) indices_value =", "testTakeManyFailsWrongInputOp(self): with self.session(use_gpu=False) as sess: input_val = self._SparseTensorValue_5x6(np.arange(6)) handle =", "input1_val}) sparse_handles = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map(", "because shape_value == [4, 5] indices_value = np.array([[0, 0], [0,", "input_val = self._SparseTensorValue_5x6(np.arange(6)) handle = add_sparse_to_tensors_map(input_val) handle_value = self.evaluate(handle) bad_handle", "5], dtype=np.int64) sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string) handles = add_many_sparse_to_tensors_map(sparse_tensor) roundtrip =", "= np.array([0]).astype(np.int32) shape = np.array([3, 4, 5]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val,", "the License. # ============================================================================== \"\"\"Tests for SparseTensorsMap.\"\"\" from __future__ import", "self.evaluate(handle) bad_handle = handle_value + 10 sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op,", "tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib from", "self.assertAllEqual(combined_shape, [2, 5, 6]) def testFeedAddTakeMany(self): with self.session(use_gpu=False) as sess:", "= np.array([5, 6]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_3x4(self, permutation):", "[1] * 6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0]) self.assertAllEqual(combined_values[:6],", "handles_value, roundtrip_value = sess.run( [handles, roundtrip], feed_dict={ sparse_tensor.indices: indices_value, sparse_tensor.values:", "combined_shape = self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # minibatch", "sp_roundtrip) self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # 
minibatch 0 self.assertAllEqual(combined_indices[:6,", "shape = np.array([5, 6]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_3x4(self,", "may obtain a copy of the License at # #", "sparse_ops._take_many_sparse_from_tensors_map) # pylint: enable=protected-access class SparseTensorsMapTest(test.TestCase): def _SparseTensorPlaceholder(self, dtype=None): if", "indices_value, sparse_tensor.values: values_value, sparse_tensor.dense_shape: shape_value }) self.assertEqual(handles_value.shape, (4,)) self.assertAllEqual(roundtrip_value.indices, indices_value)", "2], [1, 3], [2, 2], [2, 3]]).astype(np.int64) val = np.array([0,", "add_many_sparse_to_tensors_map(st) st_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op, sparse_handles=st_handles) st_roundtrip_op = st_roundtrip.values.op st_serialized", "dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor as", "= self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_1x1x1() handle =", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "[0] * 6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0]) self.assertAllEqual(combined_indices[6:,", "variables.Variable(values) shape = variables.Variable(shape) st = sparse_tensor_lib.SparseTensor(indices, values, shape) st_handles", "np.array([0, 10, 13, 14, 32, 33]).astype(np.int32) ind = ind[permutation] val", "b\"c\"]) shape_value = np.array([4, 5], dtype=np.int64) sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string) handles", "sparse_ops from tensorflow.python.ops import variables from tensorflow.python.platform import benchmark from", "5]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def testAddTakeMany(self): with self.session(graph=ops.Graph(), use_gpu=False)", "may not use this file except in compliance 
with the", "3 but rank of SparseTensor\\[1\\] is: 4\"): self.evaluate(sp_roundtrip) def testTakeManyFailsWrongInputOp(self):", "combined_shape = self.evaluate( sp_roundtrip) self.assertAllEqual(combined_indices[:6, 0], [0] * 6) #", "sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=sparse_handles) combined_indices, combined_values, combined_shape = self.evaluate(", "10, 13, 14, 32, 33]).astype(np.int32) ind = ind[permutation] val =", "0], [1, 2], [1, 3], [2, 2], [2, 3]]).astype(np.int64) val", "sp_input0 = self._SparseTensorValue_5x6(np.arange(6)) sp_input1 = self._SparseTensorValue_3x4(np.arange(6)) handle0 = add_sparse_to_tensors_map(sp_input0, shared_name=\"a\")", "= sparse_ops.serialize_many_sparse(st) st_deserialized = sparse_ops.deserialize_many_sparse( st_serialized, dtype=values.dtype) st_deserialized_op = st_deserialized.values.op", "with self.session(graph=ops.Graph(), use_gpu=False) as sess: sp_input0 = self._SparseTensorValue_5x6(np.arange(6)) sp_input1 =", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "sess: sp_input0 = self._SparseTensorValue_5x6(np.arange(6)) sp_input1 = self._SparseTensorValue_3x4(np.arange(6)) handle0 = add_sparse_to_tensors_map(sp_input0,", "5] indices_value = np.array([[0, 0], [0, 1], [2, 0]], dtype=np.int64)", "this file except in compliance with the License. 
# You", "3], [2, 2], [2, 3]]).astype(np.int64) val = np.array([0, 10, 13,", "* 6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], input0_val[0]) self.assertAllEqual(combined_indices[6:, 0],", "sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64)) def _SparseTensorValue_5x6(self, permutation): ind = np.array([[0,", "import absolute_import from __future__ import division from __future__ import print_function", "# pylint: enable=protected-access class SparseTensorsMapTest(test.TestCase): def _SparseTensorPlaceholder(self, dtype=None): if dtype", "13, 14, 32, 33]).astype(np.int32) ind = ind[permutation] val = val[permutation]", "# limitations under the License. # ============================================================================== \"\"\"Tests for SparseTensorsMap.\"\"\"", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "= self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_1x1x1() handle = add_sparse_to_tensors_map(sp_input) handle0_value =", "sess.run(handle, feed_dict={sp_input: input1_val}) sparse_handles = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip", "st_deserialized_values.indices) np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape) self.run_op_benchmark( sess, st_roundtrip_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark( sess,", "# # Licensed under the Apache License, Version 2.0 (the", "as sess: input_val = self._SparseTensorValue_5x6(np.arange(6)) handle = add_sparse_to_tensors_map(input_val) handle_value =", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "val = np.array([0, 10, 13, 14, 32, 33]).astype(np.int32) ind =", "sess.run(handle, feed_dict={sp_input: input1_val}) handle_concat = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip", "sess.run( [handles, roundtrip], feed_dict={ sparse_tensor.indices: indices_value, sparse_tensor.values: values_value, sparse_tensor.dense_shape: shape_value", "num_elements shape = np.asarray([batch_size, num_elements], dtype=np.int64) with session.Session(config=benchmark.benchmark_config()) as sess:", "= add_many_sparse_to_tensors_map(st) st_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op, sparse_handles=st_handles) st_roundtrip_op = st_roundtrip.values.op", "* 6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], input1_val[0]) self.assertAllEqual(combined_values[:6], input0_val[1])", "sparse_handles=handles_concat) combined_indices, combined_values, combined_shape = self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6, 0], [0] *", "shape = np.array([3, 4, 5]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def", "testAddTakeMany(self): with self.session(graph=ops.Graph(), use_gpu=False) as sess: sp_input0 = self._SparseTensorValue_5x6(np.arange(6)) sp_input1", "add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map = ( sparse_ops._take_many_sparse_from_tensors_map)", "(4,)) self.assertAllEqual(roundtrip_value.indices, indices_value) self.assertAllEqual(roundtrip_value.values, values_value) self.assertAllEqual(roundtrip_value.dense_shape, shape_value) def testDeserializeFailsInconsistentRank(self): with", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "sparse_handles=handle_concat) with self.assertRaisesOpError( r\"Inconsistent rank across SparseTensors: rank 
prior to", "val[permutation] shape = np.array([3, 4]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def", "64 indices_batch = np.random.randint( batch_size, size=num_elements, dtype=np.int64) indices_value = np.arange(num_elements,", "array_ops.placeholder(dtypes.int64)) def _SparseTensorValue_5x6(self, permutation): ind = np.array([[0, 0], [1, 0],", "if dtype is None: dtype = dtypes.int32 return sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64),", "input1_val = self._SparseTensorValue_3x4(np.arange(6)) handle = add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle, feed_dict={sp_input:", "= take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=sparse_handles) combined_indices, combined_values, combined_shape = self.evaluate( sp_roundtrip)", "2], [2, 3]]).astype(np.int64) val = np.array([0, 10, 13, 14, 32,", "values = variables.Variable(values) shape = variables.Variable(shape) st = sparse_tensor_lib.SparseTensor(indices, values,", "= self._SparseTensorValue_1x1x1() handle = add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle, feed_dict={sp_input: input0_val})", "handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=sparse_handles) combined_indices, combined_values, combined_shape", "= np.random.randint( batch_size, size=num_elements, dtype=np.int64) indices_value = np.arange(num_elements, dtype=np.int64) indices", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "sparse_tensor_lib from tensorflow.python.ops import array_ops from tensorflow.python.ops import sparse_ops from", "None: dtype = dtypes.int32 return sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64)) def", "array_ops.stack([handle0, handle1]) sp_out = take_many_sparse_from_tensors_map( 
sparse_map_op=handle0.op, sparse_handles=handles_concat) combined_indices, combined_values, combined_shape", "handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=handle_concat) with self.assertRaisesOpError( r\"Inconsistent", "= add_sparse_to_tensors_map(input_val) handle_value = self.evaluate(handle) bad_handle = handle_value + 10", "take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op, sparse_handles=st_handles) st_roundtrip_op = st_roundtrip.values.op st_serialized = sparse_ops.serialize_many_sparse(st) st_deserialized", "st_serialized = sparse_ops.serialize_many_sparse(st) st_deserialized = sparse_ops.deserialize_many_sparse( st_serialized, dtype=values.dtype) st_deserialized_op =", "= st_roundtrip.values.op st_serialized = sparse_ops.serialize_many_sparse(st) st_deserialized = sparse_ops.deserialize_many_sparse( st_serialized, dtype=values.dtype)", "6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0]) self.assertAllEqual(combined_values[:6], sp_input0[1]) self.assertAllEqual(combined_values[6:],", "= sess.run( [handles, roundtrip], feed_dict={ sparse_tensor.indices: indices_value, sparse_tensor.values: values_value, sparse_tensor.dense_shape:", "val = val[permutation] shape = np.array([3, 4]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val,", "or implied. # See the License for the specific language", "num_elements = 10000 batch_size = 64 indices_batch = np.random.randint( batch_size,", "Rights Reserved. 
# # Licensed under the Apache License, Version", "feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) handle_concat = ops.convert_to_tensor(", "[\"feature_value_for_embedding_lookup\"] * num_elements shape = np.asarray([batch_size, num_elements], dtype=np.int64) with session.Session(config=benchmark.benchmark_config())", "indices = variables.Variable(indices) values = variables.Variable(values) shape = variables.Variable(shape) st", "st_deserialized = sparse_ops.deserialize_many_sparse( st_serialized, dtype=values.dtype) st_deserialized_op = st_deserialized.values.op variables.global_variables_initializer().run() st_roundtrip_values", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "np.arange(num_elements, dtype=np.int64) indices = np.asarray( sorted(zip(indices_batch, indices_value)), dtype=np.int64) values =", "= dtypes.int32 return sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64)) def _SparseTensorValue_5x6(self, permutation):", "# ============================================================================== \"\"\"Tests for SparseTensorsMap.\"\"\" from __future__ import absolute_import from", "combined_indices, combined_values, combined_shape = self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6, 0], [0] * 6)", "0], [0] * 6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], input0_val[0])", "input1_val}) handle_concat = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map(", "[2, 0]], dtype=np.int64) values_value = np.array([b\"a\", b\"b\", b\"c\"]) shape_value =", "from tensorflow.python.client import session from tensorflow.python.framework import dtypes from tensorflow.python.framework", "add_sparse_to_tensors_map(sp_input0, shared_name=\"a\") 
handle1 = add_sparse_to_tensors_map(sp_input1, shared_name=\"a\") self.assertEqual(handle0.get_shape(), ()) handles_concat =", "= take_many_sparse_from_tensors_map( sparse_map_op=handles.op, sparse_handles=handles) handles_value, roundtrip_value = sess.run( [handles, roundtrip],", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle]) with self.assertRaisesOpError(r\"Unable to find", "self.assertAllEqual(combined_values[6:], input1_val[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def testAddManyTakeManyRoundTrip(self): with self.session(use_gpu=False)", "+ 10 sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle]) with self.assertRaisesOpError(r\"Unable", "self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:],", "= variables.Variable(shape) st = sparse_tensor_lib.SparseTensor(indices, values, shape) st_handles = add_many_sparse_to_tensors_map(st)", "batch_size, size=num_elements, dtype=np.int64) indices_value = np.arange(num_elements, dtype=np.int64) indices = np.asarray(", "= np.array([3, 4]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_1x1x1(self): ind", "( sparse_ops._take_many_sparse_from_tensors_map) # pylint: enable=protected-access class SparseTensorsMapTest(test.TestCase): def _SparseTensorPlaceholder(self, dtype=None):", "benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127) num_elements = 10000 batch_size = 64 indices_batch =", "(the \"License\"); # you may not use this file except", "self.assertEqual(handles_value.shape, (4,)) self.assertAllEqual(roundtrip_value.indices, indices_value) self.assertAllEqual(roundtrip_value.values, values_value) self.assertAllEqual(roundtrip_value.dense_shape, shape_value) def 
testDeserializeFailsInconsistentRank(self):", "# you may not use this file except in compliance", "sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_1x1x1(self): ind = np.array([[0, 0, 0]]).astype(np.int64)", "self.assertAllEqual(roundtrip_value.indices, indices_value) self.assertAllEqual(roundtrip_value.values, values_value) self.assertAllEqual(roundtrip_value.dense_shape, shape_value) def testDeserializeFailsInconsistentRank(self): with self.session(use_gpu=False)", "shape_value) def testDeserializeFailsInconsistentRank(self): with self.session(use_gpu=False) as sess: sp_input = self._SparseTensorPlaceholder()", "minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], input1_val[0]) self.assertAllEqual(combined_values[:6], input0_val[1]) self.assertAllEqual(combined_values[6:], input1_val[1]) self.assertAllEqual(combined_shape,", "shape = variables.Variable(shape) st = sparse_tensor_lib.SparseTensor(indices, values, shape) st_handles =", "[2, 3]]).astype(np.int64) val = np.array([0, 10, 13, 14, 32, 33]).astype(np.int32)", "variables.global_variables_initializer().run() st_roundtrip_values = self.evaluate(st_roundtrip) st_deserialized_values = self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices,", "tensorflow.python.platform import benchmark from tensorflow.python.platform import test # pylint: disable=protected-access", "take_many_sparse_from_tensors_map( sparse_map_op=handles.op, sparse_handles=handles) handles_value, roundtrip_value = sess.run( [handles, roundtrip], feed_dict={", "sparse_tensor.values: values_value, sparse_tensor.dense_shape: shape_value }) self.assertEqual(handles_value.shape, (4,)) self.assertAllEqual(roundtrip_value.indices, indices_value) self.assertAllEqual(roundtrip_value.values,", "r\"SparseTensor\\[1\\] was: 3 but rank of SparseTensor\\[1\\] is: 4\"): self.evaluate(sp_roundtrip)", 
"name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark( sess, st_deserialized_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_serialization\") if __name__ == \"__main__\":", "Copyright 2015 The TensorFlow Authors. All Rights Reserved. # #", "sparse_tensor_lib.SparseTensorValue(ind, val, shape) def testAddTakeMany(self): with self.session(graph=ops.Graph(), use_gpu=False) as sess:", "shared_name=\"a\") self.assertEqual(handle0.get_shape(), ()) handles_concat = array_ops.stack([handle0, handle1]) sp_out = take_many_sparse_from_tensors_map(", "# # Unless required by applicable law or agreed to", "rank prior to \" r\"SparseTensor\\[1\\] was: 3 but rank of", "N == 4 because shape_value == [4, 5] indices_value =", "from tensorflow.python.ops import variables from tensorflow.python.platform import benchmark from tensorflow.python.platform", "tensorflow.python.framework import sparse_tensor as sparse_tensor_lib from tensorflow.python.ops import array_ops from", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "10000 batch_size = 64 indices_batch = np.random.randint( batch_size, size=num_elements, dtype=np.int64)", "self.evaluate(st_roundtrip) st_deserialized_values = self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices) np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape)", "ind[permutation] val = val[permutation] shape = np.array([3, 4]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind,", "ind = ind[permutation] val = val[permutation] shape = np.array([5, 6]).astype(np.int64)", "Version 2.0 (the \"License\"); # you may not use this", "np.array([3, 4]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_1x1x1(self): ind =", "shape = np.asarray([batch_size, 
num_elements], dtype=np.int64) with session.Session(config=benchmark.benchmark_config()) as sess: with", "[1, 3], [1, 4], [3, 2], [3, 3]]).astype(np.int64) val =", "= self.evaluate(st_roundtrip) st_deserialized_values = self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices) np.testing.assert_equal(st_roundtrip_values.dense_shape,", "return sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64)) def _SparseTensorValue_5x6(self, permutation): ind =", "1:], sp_input1[0]) self.assertAllEqual(combined_values[:6], sp_input0[1]) self.assertAllEqual(combined_values[6:], sp_input1[1]) self.assertAllEqual(combined_shape, [2, 5, 6])", "= self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_3x4(np.arange(6)) handle = add_sparse_to_tensors_map(sp_input) handle0_value =", "indices_value) self.assertAllEqual(roundtrip_value.values, values_value) self.assertAllEqual(roundtrip_value.dense_shape, shape_value) def testDeserializeFailsInconsistentRank(self): with self.session(use_gpu=False) as", "self.assertAllEqual(roundtrip_value.dense_shape, shape_value) def testDeserializeFailsInconsistentRank(self): with self.session(use_gpu=False) as sess: sp_input =", "st_deserialized_op = st_deserialized.values.op variables.global_variables_initializer().run() st_roundtrip_values = self.evaluate(st_roundtrip) st_deserialized_values = self.evaluate(st_deserialized)", "self.evaluate(sp_roundtrip) def testTakeManyFailsWrongInputOp(self): with self.session(use_gpu=False) as sess: input_val = self._SparseTensorValue_5x6(np.arange(6))", "dtype = dtypes.int32 return sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64)) def _SparseTensorValue_5x6(self,", "__future__ import 
absolute_import from __future__ import division from __future__ import", "33]).astype(np.int32) ind = ind[permutation] val = val[permutation] shape = np.array([5,", "np.array([5, 6]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_3x4(self, permutation): ind", "implied. # See the License for the specific language governing", "= np.asarray([batch_size, num_elements], dtype=np.int64) with session.Session(config=benchmark.benchmark_config()) as sess: with ops.device(\"/cpu:0\"):", "under the Apache License, Version 2.0 (the \"License\"); # you", "sorted(zip(indices_batch, indices_value)), dtype=np.int64) values = [\"feature_value_for_embedding_lookup\"] * num_elements shape =", "indices_value = np.array([[0, 0], [0, 1], [2, 0]], dtype=np.int64) values_value", "0], [1, 0], [1, 2], [1, 3], [2, 2], [2,", "self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_3x4(np.arange(6)) handle = add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle,", "shape) def _SparseTensorValue_1x1x1(self): ind = np.array([[0, 0, 0]]).astype(np.int64) val =", "np.asarray( sorted(zip(indices_batch, indices_value)), dtype=np.int64) values = [\"feature_value_for_embedding_lookup\"] * num_elements shape", "self.assertAllEqual(combined_values[6:], sp_input1[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def testFeedAddTakeMany(self): with self.session(use_gpu=False)", "sess: sp_input = self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_1x1x1()", "self.assertRaisesOpError( r\"Inconsistent rank across SparseTensors: rank prior to \" r\"SparseTensor\\[1\\]", "shape_value }) self.assertEqual(handles_value.shape, (4,)) self.assertAllEqual(roundtrip_value.indices, indices_value) self.assertAllEqual(roundtrip_value.values, values_value) self.assertAllEqual(roundtrip_value.dense_shape, shape_value)", "by applicable law or agreed to in 
writing, software #", "SparseTensor: 10\"): self.evaluate(sp_roundtrip) class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127) num_elements =", "0, 0]]).astype(np.int64) val = np.array([0]).astype(np.int32) shape = np.array([3, 4, 5]).astype(np.int64)", "= take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=handle_concat) with self.assertRaisesOpError( r\"Inconsistent rank across SparseTensors:", "st = sparse_tensor_lib.SparseTensor(indices, values, shape) st_handles = add_many_sparse_to_tensors_map(st) st_roundtrip =", "0]], dtype=np.int64) values_value = np.array([b\"a\", b\"b\", b\"c\"]) shape_value = np.array([4,", "import benchmark from tensorflow.python.platform import test # pylint: disable=protected-access add_sparse_to_tensors_map", "14, 32, 33]).astype(np.int32) ind = ind[permutation] val = val[permutation] shape", "add_sparse_to_tensors_map(input_val) handle_value = self.evaluate(handle) bad_handle = handle_value + 10 sp_roundtrip", "import test # pylint: disable=protected-access add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map =", "dtype=np.int64) sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string) handles = add_many_sparse_to_tensors_map(sparse_tensor) roundtrip = take_many_sparse_from_tensors_map(", "= take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle]) with self.assertRaisesOpError(r\"Unable to find SparseTensor:", "6]) def testFeedAddTakeMany(self): with self.session(use_gpu=False) as sess: sp_input = self._SparseTensorPlaceholder()", "= sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) sparse_handles", "= self.evaluate(handle) bad_handle = handle_value + 10 sp_roundtrip = take_many_sparse_from_tensors_map(", "self._SparseTensorValue_5x6(np.arange(6)) sp_input1 = 
self._SparseTensorValue_3x4(np.arange(6)) handle0 = add_sparse_to_tensors_map(sp_input0, shared_name=\"a\") handle1 =", "handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) sparse_handles = ops.convert_to_tensor( [handle0_value, handle1_value],", "sp_input1[0]) self.assertAllEqual(combined_values[:6], sp_input0[1]) self.assertAllEqual(combined_values[6:], sp_input1[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def", "rank of SparseTensor\\[1\\] is: 4\"): self.evaluate(sp_roundtrip) def testTakeManyFailsWrongInputOp(self): with self.session(use_gpu=False)", "add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input:", "= take_many_sparse_from_tensors_map( sparse_map_op=handle0.op, sparse_handles=handles_concat) combined_indices, combined_values, combined_shape = self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6,", "* 6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0]) self.assertAllEqual(combined_indices[6:, 0],", "st_roundtrip_op = st_roundtrip.values.op st_serialized = sparse_ops.serialize_many_sparse(st) st_deserialized = sparse_ops.deserialize_many_sparse( st_serialized,", "\"\"\"Tests for SparseTensorsMap.\"\"\" from __future__ import absolute_import from __future__ import", "sp_out = take_many_sparse_from_tensors_map( sparse_map_op=handle0.op, sparse_handles=handles_concat) combined_indices, combined_values, combined_shape = self.evaluate(sp_out)", "BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127) num_elements = 10000 batch_size = 64", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "0], [1] * 6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0])", "sparse_tensor.dense_shape: shape_value }) self.assertEqual(handles_value.shape, (4,)) self.assertAllEqual(roundtrip_value.indices, indices_value) 
self.assertAllEqual(roundtrip_value.values, values_value) self.assertAllEqual(roundtrip_value.dense_shape,", "Unless required by applicable law or agreed to in writing,", "as sess: with ops.device(\"/cpu:0\"): indices = variables.Variable(indices) values = variables.Variable(values)", "as sess: sp_input0 = self._SparseTensorValue_5x6(np.arange(6)) sp_input1 = self._SparseTensorValue_3x4(np.arange(6)) handle0 =", "= [\"feature_value_for_embedding_lookup\"] * num_elements shape = np.asarray([batch_size, num_elements], dtype=np.int64) with", "6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], input0_val[0]) self.assertAllEqual(combined_indices[6:, 0], [1]", "variables.Variable(indices) values = variables.Variable(values) shape = variables.Variable(shape) st = sparse_tensor_lib.SparseTensor(indices,", "__future__ import print_function import numpy as np from tensorflow.python.client import", "_SparseTensorValue_1x1x1(self): ind = np.array([[0, 0, 0]]).astype(np.int64) val = np.array([0]).astype(np.int32) shape", "input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_3x4(np.arange(6)) handle = add_sparse_to_tensors_map(sp_input) handle0_value", "the specific language governing permissions and # limitations under the", "val, shape) def testAddTakeMany(self): with self.session(graph=ops.Graph(), use_gpu=False) as sess: sp_input0", "self.evaluate( sp_roundtrip) self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # minibatch 0", "4\"): self.evaluate(sp_roundtrip) def testTakeManyFailsWrongInputOp(self): with self.session(use_gpu=False) as sess: input_val =", "testFeedAddTakeMany(self): with self.session(use_gpu=False) as sess: sp_input = self._SparseTensorPlaceholder() input0_val =", "= val[permutation] shape = np.array([3, 4]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape)", "0], [0, 1], [2, 0]], dtype=np.int64) values_value = np.array([b\"a\", b\"b\",", "0 
self.assertAllEqual(combined_indices[:6, 1:], input0_val[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6) #", "is: 4\"): self.evaluate(sp_roundtrip) def testTakeManyFailsWrongInputOp(self): with self.session(use_gpu=False) as sess: input_val", "applicable law or agreed to in writing, software # distributed", "def testDeserializeFailsInconsistentRank(self): with self.session(use_gpu=False) as sess: sp_input = self._SparseTensorPlaceholder() input0_val", "= add_sparse_to_tensors_map(sp_input1, shared_name=\"a\") self.assertEqual(handle0.get_shape(), ()) handles_concat = array_ops.stack([handle0, handle1]) sp_out", "[2, 5, 6]) def testAddManyTakeManyRoundTrip(self): with self.session(use_gpu=False) as sess: #", "self._SparseTensorPlaceholder(dtype=dtypes.string) handles = add_many_sparse_to_tensors_map(sparse_tensor) roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handles.op, sparse_handles=handles) handles_value,", "1 self.assertAllEqual(combined_indices[6:, 1:], input1_val[0]) self.assertAllEqual(combined_values[:6], input0_val[1]) self.assertAllEqual(combined_values[6:], input1_val[1]) self.assertAllEqual(combined_shape, [2,", "= np.array([[0, 0], [1, 0], [1, 2], [1, 3], [2,", "sparse_ops.serialize_many_sparse(st) st_deserialized = sparse_ops.deserialize_many_sparse( st_serialized, dtype=values.dtype) st_deserialized_op = st_deserialized.values.op variables.global_variables_initializer().run()", "[1, 4], [3, 2], [3, 3]]).astype(np.int64) val = np.array([0, 10,", "SparseTensor\\[1\\] is: 4\"): self.evaluate(sp_roundtrip) def testTakeManyFailsWrongInputOp(self): with self.session(use_gpu=False) as sess:", "* num_elements shape = np.asarray([batch_size, num_elements], dtype=np.int64) with session.Session(config=benchmark.benchmark_config()) as", "take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=handle_concat) with self.assertRaisesOpError( r\"Inconsistent rank across SparseTensors: rank", 
"============================================================================== \"\"\"Tests for SparseTensorsMap.\"\"\" from __future__ import absolute_import from __future__", "= ind[permutation] val = val[permutation] shape = np.array([3, 4]).astype(np.int64) return", "in writing, software # distributed under the License is distributed", "indices = np.asarray( sorted(zip(indices_batch, indices_value)), dtype=np.int64) values = [\"feature_value_for_embedding_lookup\"] *", "bad_handle]) with self.assertRaisesOpError(r\"Unable to find SparseTensor: 10\"): self.evaluate(sp_roundtrip) class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark):", "= np.array([4, 5], dtype=np.int64) sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string) handles = add_many_sparse_to_tensors_map(sparse_tensor)", "size=num_elements, dtype=np.int64) indices_value = np.arange(num_elements, dtype=np.int64) indices = np.asarray( sorted(zip(indices_batch,", "4, 5]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def testAddTakeMany(self): with self.session(graph=ops.Graph(),", "= sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map = ( sparse_ops._take_many_sparse_from_tensors_map) # pylint: enable=protected-access class", "ind = np.array([[0, 0], [1, 0], [1, 3], [1, 4],", "handle1 = add_sparse_to_tensors_map(sp_input1, shared_name=\"a\") self.assertEqual(handle0.get_shape(), ()) handles_concat = array_ops.stack([handle0, handle1])", "0]]).astype(np.int64) val = np.array([0]).astype(np.int32) shape = np.array([3, 4, 5]).astype(np.int64) return", "values = [\"feature_value_for_embedding_lookup\"] * num_elements shape = np.asarray([batch_size, num_elements], dtype=np.int64)", "= self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices) 
np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape) self.run_op_benchmark( sess,", "input1_val[0]) self.assertAllEqual(combined_values[:6], input0_val[1]) self.assertAllEqual(combined_values[6:], input1_val[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def", "bad_handle = handle_value + 10 sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=[handle_value,", "st_deserialized_values = self.evaluate(st_deserialized) np.testing.assert_equal(st_roundtrip_values.values, st_deserialized_values.values) np.testing.assert_equal(st_roundtrip_values.indices, st_deserialized_values.indices) np.testing.assert_equal(st_roundtrip_values.dense_shape, st_deserialized_values.dense_shape) self.run_op_benchmark(", "= add_sparse_to_tensors_map(sp_input0, shared_name=\"a\") handle1 = add_sparse_to_tensors_map(sp_input1, shared_name=\"a\") self.assertEqual(handle0.get_shape(), ()) handles_concat", "self.run_op_benchmark( sess, st_roundtrip_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark( sess, st_deserialized_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_serialization\")", "# N == 4 because shape_value == [4, 5] indices_value", "ind = ind[permutation] val = val[permutation] shape = np.array([3, 4]).astype(np.int64)", "as sparse_tensor_lib from tensorflow.python.ops import array_ops from tensorflow.python.ops import sparse_ops", "disable=protected-access add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map = (", "0], [1] * 6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], input1_val[0])", "dtypes.int32 return sparse_tensor_lib.SparseTensor( array_ops.placeholder(dtypes.int64), array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64)) def _SparseTensorValue_5x6(self, permutation): ind", "= np.array([[0, 
0, 0]]).astype(np.int64) val = np.array([0]).astype(np.int32) shape = np.array([3,", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "st_handles = add_many_sparse_to_tensors_map(st) st_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op, sparse_handles=st_handles) st_roundtrip_op =", "License, Version 2.0 (the \"License\"); # you may not use", "def _SparseTensorValue_3x4(self, permutation): ind = np.array([[0, 0], [1, 0], [1,", "# You may obtain a copy of the License at", "[handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=handle_concat) with self.assertRaisesOpError(", "combined_values, combined_shape = self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6, 0], [0] * 6) #", "== [4, 5] indices_value = np.array([[0, 0], [0, 1], [2,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "= self._SparseTensorPlaceholder(dtype=dtypes.string) handles = add_many_sparse_to_tensors_map(sparse_tensor) roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handles.op, sparse_handles=handles)", "Authors. All Rights Reserved. 
# # Licensed under the Apache", "st_roundtrip.values.op st_serialized = sparse_ops.serialize_many_sparse(st) st_deserialized = sparse_ops.deserialize_many_sparse( st_serialized, dtype=values.dtype) st_deserialized_op", "np.array([3, 4, 5]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def testAddTakeMany(self): with", "from tensorflow.python.platform import test # pylint: disable=protected-access add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map", "shape = np.array([3, 4]).astype(np.int64) return sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_1x1x1(self):", "input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) sparse_handles = ops.convert_to_tensor( [handle0_value,", "np.array([[0, 0], [1, 0], [1, 3], [1, 4], [3, 2],", "minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6)", "session.Session(config=benchmark.benchmark_config()) as sess: with ops.device(\"/cpu:0\"): indices = variables.Variable(indices) values =", "= self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # minibatch 0", "self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch", "= np.array([b\"a\", b\"b\", b\"c\"]) shape_value = np.array([4, 5], dtype=np.int64) sparse_tensor", "sess: input_val = self._SparseTensorValue_5x6(np.arange(6)) handle = add_sparse_to_tensors_map(input_val) handle_value = self.evaluate(handle)", "min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark( sess, st_deserialized_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_serialization\") if __name__ ==", "self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:],", "dtype=np.int64) indices = np.asarray( sorted(zip(indices_batch, indices_value)), dtype=np.int64) 
values = [\"feature_value_for_embedding_lookup\"]", "the License for the specific language governing permissions and #", "session from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from", "Apache License, Version 2.0 (the \"License\"); # you may not", "[0] * 6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], input0_val[0]) self.assertAllEqual(combined_indices[6:,", "sparse_map_op=st_handles.op, sparse_handles=st_handles) st_roundtrip_op = st_roundtrip.values.op st_serialized = sparse_ops.serialize_many_sparse(st) st_deserialized =", "for SparseTensorsMap.\"\"\" from __future__ import absolute_import from __future__ import division", "either express or implied. # See the License for the", "np.array([4, 5], dtype=np.int64) sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string) handles = add_many_sparse_to_tensors_map(sparse_tensor) roundtrip", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "33]).astype(np.int32) ind = ind[permutation] val = val[permutation] shape = np.array([3,", "self.assertAllEqual(combined_values[:6], sp_input0[1]) self.assertAllEqual(combined_values[6:], sp_input1[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def testFeedAddTakeMany(self):", "0], [0] * 6) # minibatch 0 self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0])", "sparse_map_op=handle.op, sparse_handles=handle_concat) with self.assertRaisesOpError( r\"Inconsistent rank across SparseTensors: rank prior", "class SparseTensorsMapTest(test.TestCase): def _SparseTensorPlaceholder(self, dtype=None): if dtype is None: dtype", "feed_dict={sp_input: input0_val}) handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) sparse_handles = ops.convert_to_tensor(", "pylint: disable=protected-access add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map =", 
"array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64)) def _SparseTensorValue_5x6(self, permutation): ind = np.array([[0, 0], [1,", "def testTakeManyFailsWrongInputOp(self): with self.session(use_gpu=False) as sess: input_val = self._SparseTensorValue_5x6(np.arange(6)) handle", "_SparseTensorPlaceholder(self, dtype=None): if dtype is None: dtype = dtypes.int32 return", "val, shape) def _SparseTensorValue_1x1x1(self): ind = np.array([[0, 0, 0]]).astype(np.int64) val", "sp_input0[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch 1 self.assertAllEqual(combined_indices[6:,", "handle_value + 10 sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle]) with", "across SparseTensors: rank prior to \" r\"SparseTensor\\[1\\] was: 3 but", "import variables from tensorflow.python.platform import benchmark from tensorflow.python.platform import test", "[2, 2], [2, 3]]).astype(np.int64) val = np.array([0, 10, 13, 14,", "sp_input = self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_3x4(np.arange(6)) handle", "val, shape) def _SparseTensorValue_3x4(self, permutation): ind = np.array([[0, 0], [1,", "}) self.assertEqual(handles_value.shape, (4,)) self.assertAllEqual(roundtrip_value.indices, indices_value) self.assertAllEqual(roundtrip_value.values, values_value) self.assertAllEqual(roundtrip_value.dense_shape, shape_value) def", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "def testAddManyTakeManyRoundTrip(self): with self.session(use_gpu=False) as sess: # N == 4", "sparse_tensor.indices: indices_value, sparse_tensor.values: values_value, sparse_tensor.dense_shape: shape_value }) self.assertEqual(handles_value.shape, (4,)) self.assertAllEqual(roundtrip_value.indices,", "License. 
# ============================================================================== \"\"\"Tests for SparseTensorsMap.\"\"\" from __future__ import absolute_import", "variables.Variable(shape) st = sparse_tensor_lib.SparseTensor(indices, values, shape) st_handles = add_many_sparse_to_tensors_map(st) st_roundtrip", "= ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=handle_concat)", "dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=handle_concat) with self.assertRaisesOpError( r\"Inconsistent rank", "= add_many_sparse_to_tensors_map(sparse_tensor) roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handles.op, sparse_handles=handles) handles_value, roundtrip_value =", "r\"Inconsistent rank across SparseTensors: rank prior to \" r\"SparseTensor\\[1\\] was:", "shape) def _SparseTensorValue_3x4(self, permutation): ind = np.array([[0, 0], [1, 0],", "to find SparseTensor: 10\"): self.evaluate(sp_roundtrip) class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127)", "np.asarray([batch_size, num_elements], dtype=np.int64) with session.Session(config=benchmark.benchmark_config()) as sess: with ops.device(\"/cpu:0\"): indices", "variables from tensorflow.python.platform import benchmark from tensorflow.python.platform import test #", "handle = add_sparse_to_tensors_map(input_val) handle_value = self.evaluate(handle) bad_handle = handle_value +", "sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=handle_concat) with self.assertRaisesOpError( r\"Inconsistent rank across", "= sess.run(handle, feed_dict={sp_input: input1_val}) sparse_handles = ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64)", "absolute_import from __future__ import division from __future__ 
import print_function import", "input0_val[0]) self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch 1 self.assertAllEqual(combined_indices[6:,", "5, 6]) def testFeedAddTakeMany(self): with self.session(use_gpu=False) as sess: sp_input =", "sparse_tensor_lib.SparseTensor(indices, values, shape) st_handles = add_many_sparse_to_tensors_map(st) st_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op,", "combined_indices, combined_values, combined_shape = self.evaluate( sp_roundtrip) self.assertAllEqual(combined_indices[:6, 0], [0] *", "handle1_value = sess.run(handle, feed_dict={sp_input: input1_val}) handle_concat = ops.convert_to_tensor( [handle0_value, handle1_value],", "\"License\"); # you may not use this file except in", "5, 6]) def testAddManyTakeManyRoundTrip(self): with self.session(use_gpu=False) as sess: # N", "st_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=st_handles.op, sparse_handles=st_handles) st_roundtrip_op = st_roundtrip.values.op st_serialized =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= ind[permutation] val = val[permutation] shape = np.array([5, 6]).astype(np.int64) return", "3]]).astype(np.int64) val = np.array([0, 10, 13, 14, 32, 33]).astype(np.int32) ind", "handle = add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle, feed_dict={sp_input: input0_val}) handle1_value =", "shared_name=\"a\") handle1 = add_sparse_to_tensors_map(sp_input1, shared_name=\"a\") self.assertEqual(handle0.get_shape(), ()) handles_concat = array_ops.stack([handle0,", "# distributed under the License is distributed on an \"AS", "self.assertAllEqual(combined_values[:6], input0_val[1]) self.assertAllEqual(combined_values[6:], input1_val[1]) self.assertAllEqual(combined_shape, [2, 5, 6]) def testAddManyTakeManyRoundTrip(self):", "import print_function import numpy as np from tensorflow.python.client import session", "testDeserializeFailsInconsistentRank(self): with 
self.session(use_gpu=False) as sess: sp_input = self._SparseTensorPlaceholder() input0_val =", "= sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map = ( sparse_ops._take_many_sparse_from_tensors_map) #", "# Unless required by applicable law or agreed to in", "__future__ import division from __future__ import print_function import numpy as", "use_gpu=False) as sess: sp_input0 = self._SparseTensorValue_5x6(np.arange(6)) sp_input1 = self._SparseTensorValue_3x4(np.arange(6)) handle0", "def _SparseTensorPlaceholder(self, dtype=None): if dtype is None: dtype = dtypes.int32", "and # limitations under the License. # ============================================================================== \"\"\"Tests for", "self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_1x1x1() handle = add_sparse_to_tensors_map(sp_input) handle0_value = sess.run(handle,", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "take_many_sparse_from_tensors_map( sparse_map_op=handle0.op, sparse_handles=handles_concat) combined_indices, combined_values, combined_shape = self.evaluate(sp_out) self.assertAllEqual(combined_indices[:6, 0],", "sparse_tensor_lib.SparseTensorValue(ind, val, shape) def _SparseTensorValue_3x4(self, permutation): ind = np.array([[0, 0],", "np.array([[0, 0], [1, 0], [1, 2], [1, 3], [2, 2],", "You may obtain a copy of the License at #", "sess: # N == 4 because shape_value == [4, 5]", "sp_input1 = self._SparseTensorValue_3x4(np.arange(6)) handle0 = add_sparse_to_tensors_map(sp_input0, shared_name=\"a\") handle1 = add_sparse_to_tensors_map(sp_input1,", "sess: with ops.device(\"/cpu:0\"): indices = variables.Variable(indices) values = variables.Variable(values) shape", "sparse_handles=sparse_handles) combined_indices, combined_values, combined_shape = self.evaluate( sp_roundtrip) self.assertAllEqual(combined_indices[:6, 0], [0]", "sess: sp_input = 
self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_3x4(np.arange(6))", "values_value, sparse_tensor.dense_shape: shape_value }) self.assertEqual(handles_value.shape, (4,)) self.assertAllEqual(roundtrip_value.indices, indices_value) self.assertAllEqual(roundtrip_value.values, values_value)", "* 6) # minibatch 1 self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0]) self.assertAllEqual(combined_values[:6], sp_input0[1])", "sp_input = self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6)) input1_val = self._SparseTensorValue_1x1x1() handle", "from __future__ import print_function import numpy as np from tensorflow.python.client", "def testAddTakeMany(self): with self.session(graph=ops.Graph(), use_gpu=False) as sess: sp_input0 = self._SparseTensorValue_5x6(np.arange(6))", "# Copyright 2015 The TensorFlow Authors. All Rights Reserved. #", "st_deserialized_values.dense_shape) self.run_op_benchmark( sess, st_roundtrip_op, min_iters=2000, name=\"benchmark_very_large_2d_float_st_tensor_maps\") self.run_op_benchmark( sess, st_deserialized_op, min_iters=2000,", "sparse_ops._add_sparse_to_tensors_map add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map take_many_sparse_from_tensors_map = ( sparse_ops._take_many_sparse_from_tensors_map) # pylint:", "the Apache License, Version 2.0 (the \"License\"); # you may", "with self.session(use_gpu=False) as sess: sp_input = self._SparseTensorPlaceholder() input0_val = self._SparseTensorValue_5x6(np.arange(6))", "1:], input1_val[0]) self.assertAllEqual(combined_values[:6], input0_val[1]) self.assertAllEqual(combined_values[6:], input1_val[1]) self.assertAllEqual(combined_shape, [2, 5, 6])", "= np.array([[0, 0], [0, 1], [2, 0]], dtype=np.int64) values_value =", "2], [3, 3]]).astype(np.int64) val = np.array([0, 10, 13, 14, 32,", "self.assertAllEqual(roundtrip_value.values, values_value) 
self.assertAllEqual(roundtrip_value.dense_shape, shape_value) def testDeserializeFailsInconsistentRank(self): with self.session(use_gpu=False) as sess:", "self.session(use_gpu=False) as sess: input_val = self._SparseTensorValue_5x6(np.arange(6)) handle = add_sparse_to_tensors_map(input_val) handle_value", "= sparse_ops.deserialize_many_sparse( st_serialized, dtype=values.dtype) st_deserialized_op = st_deserialized.values.op variables.global_variables_initializer().run() st_roundtrip_values =", "= ops.convert_to_tensor( [handle0_value, handle1_value], dtype=dtypes.int64) sp_roundtrip = take_many_sparse_from_tensors_map( sparse_map_op=handle.op, sparse_handles=sparse_handles)", "[1, 2], [1, 3], [2, 2], [2, 3]]).astype(np.int64) val =", "10\"): self.evaluate(sp_roundtrip) class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark): def benchmarkVeryLarge2DFloatSparseTensor(self): np.random.seed(127) num_elements = 10000", "[1, 0], [1, 3], [1, 4], [3, 2], [3, 3]]).astype(np.int64)" ]
[ "response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) headers =", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_deletion_with_valid_object( self, object_type, generate_object): container_name = self.create_temp_container(", "self.assertIn( 'X-Object-Meta-Foo', response.headers, msg=\"object updated with X-Object-Meta-Foo header\") expected =", "self.default_obj_name) self.assertIn( 'Access-Control-Allow-Credentials', response.headers, msg=\"Access-Control-Allow-Credentials header was set\") expected =", "response = self.client.delete_object( container_name, object_name) method = 'delete object' expected", "Access-Control-Allow-Origin header' expected = 201 received = response.status_code self.assertEqual( expected,", "generate_object(container_name, object1_name, headers=object1_headers) response = self.client.get_object( container_name, object1_name) expected =", "method = 'object retrieval with if unmodified since header' expected", "container_name, self.default_obj_name, headers=headers) method = 'object retrieval with if match", "set\") expected = 'attachment; filename=testdata.txt' received = response.headers.get('Content-Disposition') self.assertEqual( expected,", "= self.client.create_object(container_name, object_name, data=object_data, headers=headers) method = 'object creation with", "method = 'set object metadata X-Object-Meta-Grok: Drok' expected = 202", "data' updated_content_length = str(len(updated_object_data)) headers = {'Content-Length': updated_content_length, 'Content-Type': CONTENT_TYPES.get('text')}", "type: {0}' ' received: {1}'.format(expected, received)) def test_object_creation_via_chunked_transfer(self): \"\"\" Scenario:", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Method', 
response.headers,", "object_name) headers = {'If-Modified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'}", "@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def test_object_creation_with_uppercase_etag(self): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_copy_object(self, object_type, generate_object): src_container_name =", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name,", "object_info.get('md5'), response_md5, msg='should return identical object') @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_update_with_valid_object_name(", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_etag( self, object_type, generate_object):", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object( dest_container_name,", "def ddtest_object_update_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "= 'partial object retrieval with complete range' expected = 200", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Request-Headers': 'x-requested-with'} object_info =", "expected = 'true' received = response.headers.get('Access-Control-Allow-Credentials') self.assertEqual( expected, received, msg='object", "name' expected = 201 received = response.status_code self.assertEqual( expected, received,", 
"GMT'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object", "msg=\"X-Delete-At header was set\") @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning') def ddtest_versioned_container_creation_with_valid_data( self, object_type,", "response = self.client.get_object( dest_container_name, dest_object_name) method = 'copied object retrieval'", "'object creation with Access-Control-Allow-Methods header' expected = 201 received =", "\"Test chunk %s\\r\\n\" % i @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_valid_object_name( self, object_type,", "with Access-Control-Allow-Origin header' ' value expected: {0} received: {1}'.format( expected,", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "received, msg='object X-Object-Meta-Grok header value expected: {0}' ' received: {1}'.format(expected,", "= 200 received = object_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "ANY KIND, either express or implied. 
See the License for", "{0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_content_disposition( self, object_type, generate_object):", "self.default_obj_name) self.assertIn( 'Access-Control-Allow-Methods', response.headers, msg=\"Access-Control-Allow-Methods header was set\") expected =", "generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name def object_data_op(data,", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Max-Age': '5'} object_info =", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_deletion_with_valid_object( self, object_type,", "'list on versioned container' expected = 200 received = response.status_code", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Foo', response.headers, msg=\"object updated with X-Object-Meta-Foo", "header' ' value expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList())", "since header' expected = 200 received = response.status_code self.assertEqual( expected,", "received)) headers = {'X-Object-Meta-Foo': 'Bar'} response = self.client.set_object_metadata( container_name, self.default_obj_name,", "with Access-Control-Allow-Origin header' expected = 201 received = response.status_code self.assertEqual(", "{0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_delete_after( self, object_type, generate_object):", "def test_object_creation_via_chunked_transfer(self): \"\"\" Scenario: Create an object using chunked transfer", "= self.default_obj_name generate_object(container_name, object_name, headers={'X-Object-Meta-Grok': 'Drok'}) response = self.client.get_object_metadata( 
container_name,", "Aug 2001 18:44:42 GMT'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "= response.headers.get('Access-Control-Allow-Origin') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Origin header'", "response = self.client.get_object(container_name, object_name) method = 'object creation with valid", "msg='object created with Access-Control-Allow-Origin header' ' value expected: {0} received:", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_start_range( self, object_type,", "object1_name) expected = 'application/x-www-form-urlencoded' received = response.headers.get('content-type') self.assertEqual( expected, received,", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Request-Method': 'GET'} object_info =", "object_response = self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', object_response.headers, msg=\"Etag header", "and limitations under the License. 
\"\"\" import calendar import time", "with Access-Control-Max-Age header' ' value expected: {0} received: {1}'.format( expected,", "{0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_creation_with_file_compression( self, object_type,", "method=method, expected=expected, received=str(received))) response = self.client.list_objects(object_history_container_name) method = 'list on", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Headers', response.headers, msg=\"Access-Control-Request-Headers", "Access-Control-Request-Headers header' ' value expected: {0} received: {1}'.format( expected, received))", "'object retrieval' expected = 200 received = response.status_code self.assertEqual( expected,", "with Content-Encoding header' expected = 201 received = response.status_code self.assertEqual(", "self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object retrieval with if", "in range(10): yield \"Test chunk %s\\r\\n\" % i @data_driven_test(ObjectDatasetList()) def", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'If-Unmodified-Since':", "= 'object retrieval with if modified since header (past date)'", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Method',", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_unmodified_since( self, object_type,", "' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_x_delete_at( self,", "dest} 
response = self.client.copy_object( src_container_name, src_object_name, headers=headers) method = 'copy", "self.assertEqual( expected, received, msg='object X-Object-Meta-Foo header value expected: {0}' '", "msg=\"access-control-expose-headers header should be set\") self.assertIn( 'access-control-allow-origin', response.headers, msg=\"access-control-allow-origin header", "received=str(received))) headers = {'If-None-Match': object_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name,", "expected = 201 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "super(ObjectSmokeTest, cls).setUpClass() cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE @staticmethod def generate_chunk_data(): for i", "copy object' expected = 201 received = response.status_code self.assertEqual( expected,", "self.client.create_object( container_name, self.default_obj_name, headers=headers, data=updated_object_data) method = 'object update with", "'object retrieval with if unmodified since header' expected = 200", "response.headers, msg=\"object not created with X-Object-Meta-Grok header\") expected = 'Drok'", "def ddtest_object_creation_with_access_control_allow_credentials( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "response_md5, msg='should return identical object') @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_update_with_valid_object_name( self,", "received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_expose_headers( self, object_type, generate_object): container_name =", "import ( ObjectDatasetList, CONTENT_TYPES) CONTAINER_DESCRIPTOR = 'object_smoke_test' STATUS_CODE_MSG = ('{method}", "{1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def 
ddtest_object_creation_with_delete_after( self, object_type, generate_object): container_name =", "response = object_info.get('response') method = 'object creation with Access-Control-Allow-Origin header'", "'5'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "{0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_not_detected_without_detect_content_type_header( self, object_type,", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Request-Headers': 'x-requested-with'}", "date)' expected = 304 received = response.status_code self.assertEqual( expected, received,", "= {'X-Object-Meta-Foo': 'Bar'} response = self.client.set_object_metadata( container_name, self.default_obj_name, headers=headers) method", "See the License for the specific language governing permissions and", "'access-control-expose-headers': 'X-Trans-Id'} generate_object(container_name, object_name, headers=headers) headers = {'Origin': 'http://example.com'} response", "'Drok' received = response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object X-Object-Meta-Grok header", "= {'Access-Control-Request-Headers': 'x-requested-with'} object_info = generate_object(container_name, object_name, headers=object_headers) response =", "import \\ Constants from cloudroast.objectstorage.fixtures import ObjectStorageFixture from cloudroast.objectstorage.generators import", "expected = 'gzip' received = response.headers.get('Content-Encoding') self.assertEqual( expected, received, msg='object", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_fails_with_if_unmodified_since( self, object_type,", "= {'Access-Control-Max-Age': '5'} object_info = 
generate_object(container_name, object_name, headers=object_headers) response =", "{1}'.format(expected, received)) headers = {'X-Object-Meta-Foo': 'Bar'} response = self.client.set_object_metadata( container_name,", "container_name, object2_name) expected = 'text/plain' received = response.headers.get('content-type') self.assertEqual( expected,", "Access-Control-Request-Method header' expected = 201 received = response.status_code self.assertEqual( expected,", "with Content-Encoding header value' ' expected: {0} received: {1}'.format(expected, received))", "range(10): yield \"Test chunk %s\\r\\n\" % i @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_valid_object_name(", "generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name start_time =", "headers=object_headers) response = object_info.get('response') method = 'object creation with Access-Control-Expose-Headers", "\"chunked\"} create_response = self.client.create_object( container_name, self.default_obj_name, headers=headers, data=self.generate_chunk_data()) method =", "= self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object should be", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_valid_object_name( self, object_type, generate_object): container_name", "object metadata X-Object-Meta-Grok: Drok' expected = 202 received = response.status_code", "else: expected = '\"{0}\"'.format(object_info.get('etag')) received = response.headers.get('etag') self.assertEqual( expected, received,", "response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = ('object retrieval", "received = object_response.headers.get('etag') self.assertEqual( expected, received, msg='object created with Etag", "= self.client.get_object( container_name, self.default_obj_name) 
self.assertIn( 'etag', response.headers, msg=\"Etag header was", "headers = {'If-None-Match': object_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "ddtest_object_creation_with_access_control_allow_credentials( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "self.client.get_object(container_name, self.default_obj_name) method = 'Object retrieval' expected = 200 received", "= 'object creation with X-Delete-After header' expected = 201 received", "= zlib.compress(data) return (data, extra_data) object_headers = {'Content-Encoding': 'gzip'} object_info", "@ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_headers( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "self.default_obj_name generate_object(container_name, object_name) headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2101", "{'Content-Encoding': 'gzip'} object_info = generate_object(container_name, object_name, data_op=object_data_op, headers=object_headers) response =", "method=method, expected=expected, received=str(received))) headers = {'If-None-Match': object_info.get('etag')} response = self.client.get_object(", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Method', response.headers, msg=\"Access-Control-Request-Method header was", "= self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=-4'} response =", "msg='object created with Access-Control-Allow-Methods header' ' value expected: {0} received:", "permissions and limitations 
under the License. \"\"\" import calendar import", "object retrieval with end range' expected = 206 received =", "one creation' expected = 201 received = response.status_code self.assertEqual( expected,", "object_name, headers=headers) self.assertIn( 'access-control-expose-headers', response.headers, msg=\"access-control-expose-headers header should be set\")", "container_name, self.default_obj_name, headers=headers) method = 'object retrieval with if unmodified", "object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name start_time", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) response = self.client.delete_object(", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Max-Age', response.headers, msg=\"Access-Control-Max-Age", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers,", "received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_none_match( self, object_type, generate_object): \"\"\" Bug", "Version 2.0 (the \"License\"); you may not use this file", "be set\") expected = 'http://example.com' received = response.headers.get('access-control-allow-origin') self.assertEqual( expected,", "container_name, object_name) method = 'delete object' expected = 204 received", "expected = 'x-requested-with' received = response.headers.get('Access-Control-Request-Headers') self.assertEqual( expected, received, msg='object", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning') def ddtest_versioned_container_creation_with_valid_data( self, object_type, generate_object): 
container_name = self.create_temp_container(", "not use this file except in compliance with the License.", "response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Update", "you may not use this file except in compliance with", "response.headers.get('Access-Control-Expose-Headers') self.assertEqual( expected, received, msg='object created with Access-Control-Expose-Headers header' '", "Drok' expected = 202 received = response.status_code self.assertEqual( expected, received,", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "with Etag header' ' value expected: {0} received: {1}'.format( expected,", "the License. You may obtain a copy of the License", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'X-Object-Meta-Grok':", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Encoding', response.headers, msg=\"Content-Encoding header was set\")", "object') @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_update_with_valid_object_name( self, object_type, generate_object): container_name =", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name obj_info = generate_object(container_name, object_name) headers", "end range' expected = 206 received = response.status_code self.assertEqual( expected,", "object2_name, headers=object2_headers) response = self.client.get_object( container_name, object1_name) expected = 'application/x-www-form-urlencoded'", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_end_range( self,", "received = response.headers.get('etag') self.assertEqual( expected, received, msg='object 
created with Etag", "self.default_obj_name object_headers = {'Access-Control-Expose-Headers': 'X-Foo-Header'} object_info = generate_object(container_name, object_name, headers=object_headers)", "non-current container response = self.client.list_objects( object_history_container_name) method = 'list on", "if match header' expected = 200 received = response.status_code self.assertEqual(", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers = {'X-Detect-Content-Type':", "= self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=5-8'} response =", "= {'If-Modified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'} response =", "via chunked transfer' expected = 201 received = create_response.status_code self.assertEqual(", "headers = {'Destination': dest} response = self.client.copy_object( src_container_name, src_object_name, headers=headers)", "headers = {'If-None-Match': 'grok'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "def ddtest_object_creation_with_x_delete_at( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received)))", "container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object updated with X-Object-Meta-Grok header\")", "'partial object retrieval with complete range' expected = 200 received", "= response.headers.get('Access-Control-Request-Method') self.assertEqual( expected, received, msg='object created with Access-Control-Request-Method header'", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_methods( self, object_type, generate_object): 
container_name = self.create_temp_container(", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Foo',", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_headers(", "msg=\"X-Delete-At header was set\") expected = future_time received = response.headers.get('X-Delete-At')", "as not modified' expected = 304 received = response.status_code self.assertEqual(", "self.default_obj_name object_headers = { 'Access-Control-Allow-Origin': 'http://example.com'} object_info = generate_object(container_name, object_name,", "POST, OPTIONS' received = response.headers.get('Access-Control-Allow-Methods') self.assertEqual( expected, received, msg='object created", "object_headers = {'X-Delete-After': '60'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "object_name = self.default_obj_name start_time = calendar.timegm(time.gmtime()) future_time = str(int(start_time +", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_start_range( self, object_type, generate_object):", "generate_object(container_name, object_name) updated_object_data = 'Updated test file data' updated_content_length =", "name' expected = 200 received = response.status_code self.assertEqual( expected, received,", "= {'If-Match': obj_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", "self.default_obj_name obj_info = generate_object(container_name, object_name) headers = {'If-Match': obj_info.get('etag')} response", "response = self.client.copy_object( dest_container_name, dest_obj_name, headers=hdrs) method = 'put 
copy", "response = self.client.get_object( container_name, self.default_obj_name) method = 'object retrieval' expected", "method=method, expected=expected, received=str(received))) response = self.client.get_object( dest_container_name, dest_object_name) method =", "\"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info =", "@staticmethod def generate_chunk_data(): for i in range(10): yield \"Test chunk", "STATUS_CODE_MSG = ('{method} expected status code {expected}' ' received status", "object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers", "msg=\"Access-Control-Allow-Credentials header was set\") expected = 'true' received = response.headers.get('Access-Control-Allow-Credentials')", "generate_object(container_name, object1_name, headers=object1_headers) object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type': False,", "was set\") expected = 'GET' received = response.headers.get('Access-Control-Request-Method') self.assertEqual( expected,", "object_name = self.default_obj_name object_headers = {'Access-Control-Expose-Headers': 'X-Foo-Header'} object_info = generate_object(container_name,", "md5(response.content).hexdigest() self.assertEqual( object_info.get('md5'), response_md5, msg='should return identical object') @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo']))", "'slo'])) def test_object_creation_with_uppercase_etag(self): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name", "received = response.headers.get('Access-Control-Allow-Credentials') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Credentials", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Max-Age', response.headers, 
msg=\"Access-Control-Max-Age header was set\")", "'slo'])) def ddtest_object_retrieval_with_if_none_match( self, object_type, generate_object): \"\"\" Bug filed for", "self.default_obj_name, headers=headers) method = 'object retrieval with if modified since", "= 'text/plain' received = response.headers.get('content-type') self.assertEqual( expected, received, msg='object created", "cafe.drivers.unittest.decorators import ( DataDrivenFixture, data_driven_test) from cloudcafe.objectstorage.objectstorage_api.common.constants import \\ Constants", "\"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {\"Transfer-Encoding\": \"chunked\"} create_response", "range' expected = 200 received = response.status_code self.assertEqual( expected, received,", "test_object_creation_with_uppercase_etag(self): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_data =", "{'Range': 'bytes=-4'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method =", "from cloudroast.objectstorage.generators import ( ObjectDatasetList, CONTENT_TYPES) CONTAINER_DESCRIPTOR = 'object_smoke_test' STATUS_CODE_MSG", "an object (version 2) object_name = self.default_obj_name ver2_info = generate_object(container_name,", "law or agreed to in writing, software distributed under the", "response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo']))", "self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', object_response.headers, msg=\"Etag header was set\")", "= {'Range': 'bytes=-4'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", "received: {1}'.format(expected, received)) headers = {'X-Object-Meta-Foo': 'Bar'} response = 
self.client.set_object_metadata(", "headers = {'If-Modified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'} response", "self.client.get_object( container_name, object1_name) expected = 'text/plain' received = response.headers.get('content-type') self.assertEqual(", "object_name = self.default_obj_name object_info = generate_object(container_name, object_name) headers = {'If-None-Match':", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name obj_info = generate_object(container_name, object_name)", "received=str(received))) response = self.client.get_object( container_name, self.default_obj_name) method = 'object retrieval'", "header was set\") expected = future_time received = response.headers.get('X-Delete-At') self.assertEqual(", "= data_md5.upper() headers = {\"ETag\": upper_etag} create_response = self.client.create_object(container_name, object_name,", "# list objects in non-current container response = self.client.list_objects( object_history_container_name)", "{0}' ' received: {1}'.format(expected, received)) object2_name = 'object2.txt' object2_headers =", "def ddtest_object_creation_with_access_control_request_method( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_content_disposition( self, object_type,", "header was set\") if object_type == 'standard': expected = object_info.get('etag')", "201 received = create_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "ObjectSmokeTest(ObjectStorageFixture): @classmethod def setUpClass(cls): super(ObjectSmokeTest, cls).setUpClass() cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE @staticmethod", "generate_object(src_container_name, src_object_name) dest_object_name = 
'{0}_destination'.format(self.default_obj_name) dest = '/{0}/{1}'.format(dest_container_name, dest_object_name) headers", "headers=headers) method = 'copy object' expected = 201 received =", "self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt'", "set\") expected = 'x-requested-with' received = response.headers.get('Access-Control-Request-Headers') self.assertEqual( expected, received,", "'set object metadata X-Object-Meta-Grok: Drok' expected = 202 received =", "' received status code {received}') @DataDrivenFixture class ObjectSmokeTest(ObjectStorageFixture): @classmethod def", "file data' updated_content_length = str(len(updated_object_data)) headers = {'Content-Length': updated_content_length, 'Content-Type':", "data_md5 = md5(object_data).hexdigest() upper_etag = data_md5.upper() headers = {\"ETag\": upper_etag}", "response.headers, msg=\"Access-Control-Request-Method header was set\") expected = 'GET' received =", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name, headers={'X-Object-Meta-Grok': 'Drok'}) response", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'If-Modified-Since': 'Fri,", "def test_object_creation_with_uppercase_etag(self): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_data", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_unmodified_since( self, object_type, generate_object): container_name =", "header was set\") expected = '5' received = response.headers.get('Access-Control-Max-Age') self.assertEqual(", "be flagged as not modified' expected = 304 received =", "'object creation with Access-Control-Request-Method header' expected = 201 
received =", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_expose_headers( self, object_type, generate_object): container_name = self.create_temp_container(", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Origin', response.headers,", "with if modified since header (past date)' expected = 200", "= self.default_obj_name object_headers = {'Access-Control-Max-Age': '5'} object_info = generate_object(container_name, object_name,", "= response.headers.get('content-type') self.assertEqual( expected, received, msg='object created should have content", "'http://example.com' received = response.headers.get('Access-Control-Allow-Origin') self.assertEqual( expected, received, msg='object created with", "'Bar'} response = self.client.set_object_metadata( container_name, self.default_obj_name, headers=headers) method = 'set", "= self.default_obj_name generate_object(container_name, object_name) updated_object_data = 'Updated test file data'", "etag header' expected = 201 received = create_response.status_code self.assertEqual( expected,", "'application/x-www-form-urlencoded'} generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object( container_name, object1_name) expected", "since header') expected = 412 received = response.status_code self.assertEqual( expected,", "received, msg='object created with X-Delete-At header value' ' expected: {0}", "'X-Object-Meta-Grok', response.headers, msg=\"object updated with X-Object-Meta-Grok header\") expected = 'Drok'", "= {'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) object2_name = 'object2.txt' object2_headers", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = 
self.default_obj_name object_headers = {'Access-Control-Max-Age': '5'}", "set\") self.assertIn( 'access-control-allow-origin', response.headers, msg=\"access-control-allow-origin header should be set\") expected", "= self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=5-'} response =", "ddtest_obj_metadata_update(self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name", "received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_method( self, object_type, generate_object): container_name =", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container(", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update_with_object_possessing_metadata( self, object_type, generate_object):", "object_headers = {'X-Delete-At': future_time} object_info = generate_object(container_name, object_name, headers=object_headers) response", "not created with X-Object-Meta-Grok header\") expected = 'Drok' received =", "response.headers, msg=\"Access-Control-Max-Age header was set\") expected = '5' received =", "value expected: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update(self,", "{'Range': 'bytes=5-8'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method =", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers", "( DataDrivenFixture, data_driven_test) from 
cloudcafe.objectstorage.objectstorage_api.common.constants import \\ Constants from cloudroast.objectstorage.fixtures", "self.default_obj_name object_headers = { 'Content-Disposition': 'attachment; filename=testdata.txt'} object_info = generate_object(container_name,", "'put copy object' expected = 201 received = response.status_code self.assertEqual(", "under the License. \"\"\" import calendar import time import zlib", "'slo'])) def ddtest_object_retrieval_with_if_match( self, object_type, generate_object): \"\"\" Bug filed for", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "= 'partial object retrieval with start range' expected = 206", "method = 'object creation with valid object name' expected =", "self.default_obj_name generate_object(container_name, object_name) headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2001", "on empty versioned container' expected = 204 received = response.status_code", "object_name = self.default_obj_name headers = {'access-control-allow-origin': 'http://example.com', 'access-control-expose-headers': 'X-Trans-Id'} generate_object(container_name,", "expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_creation_with_file_compression( self,", "msg=\"object updated with X-Object-Meta-Foo header\") expected = 'Bar' received =", "= response.headers.get('etag') self.assertEqual( expected, received, msg='object created with Etag header'", "received = response.headers.get('access-control-allow-origin') self.assertEqual( expected, received, msg='access-control-allow-origin header should reflect", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "list objects in non-current container response = self.client.list_objects( object_history_container_name) method", "def setUpClass(cls): super(ObjectSmokeTest, cls).setUpClass() cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE 
@staticmethod def generate_chunk_data():", "'object creation with Access-Control-Allow-Origin header' expected = 201 received =", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_method(", "received, msg='object created with Access-Control-Allow-Credentials header' ' value expected: {0}", "method = ('object retrieval precondition fail with if unmodified' '", "{'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) object2_name = 'object2.txt' object2_headers =", "= 'copied object retrieval' expected = 200 received = response.status_code", "extra_data): data = zlib.compress(data) return (data, extra_data) object_headers = {'Content-Encoding':", "self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\") expected =", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_modified_since( self, object_type,", "expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_retrieval_with_origin( self, object_type, generate_object): container_name", "object_headers = {'Access-Control-Max-Age': '5'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "container_name, self.default_obj_name) response = self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', response.headers,", "{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_credentials( self, object_type, generate_object):", "expected, received, msg='object 
X-Object-Meta-Grok header value expected: {0}' ' received:", "self.default_obj_name object_headers = {'Access-Control-Request-Headers': 'x-requested-with'} object_info = generate_object(container_name, object_name, headers=object_headers)", "received=str(received))) @unittest.skip('Problem with this tests assertion, needs review') @data_driven_test(ObjectDatasetList()) def", "container_name, self.default_obj_name) self.assertIn( 'Access-Control-Max-Age', response.headers, msg=\"Access-Control-Max-Age header was set\") expected", "'object retrieval with if match header' expected = 200 received", "if object_type == 'standard': expected = object_info.get('etag') else: expected =", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Encoding', response.headers, msg=\"Content-Encoding", "received=str(received))) response = self.client.get_object( dest_container_name, dest_object_name) method = 'copied object", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @unittest.skip('Problem with this", "object_type, generate_object): \"\"\" Bug filed for dlo/slo support of If-match", "retrieval with start and end range' expected = 206 received", "@ObjectStorageFixture.required_features('object_versioning') def ddtest_versioned_container_creation_with_valid_data( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_fails_with_if_unmodified_since( self, object_type, generate_object): container_name =", "{\"ETag\": upper_etag} create_response = self.client.create_object(container_name, object_name, data=object_data, headers=headers) method =", "Results: Return a 201 status code and a single object", "= self.client.get_object( container_name, 
object1_name) expected = 'text/plain' received = response.headers.get('content-type')", "except in compliance with the License. You may obtain a", "set\") expected = 'GET' received = response.headers.get('Access-Control-Request-Method') self.assertEqual( expected, received,", "= 'object creation with Access-Control-Allow-Credentials header' expected = 201 received", "object_info.get('response') method = 'object creation with Content-Encoding header' expected =", "self.assertEqual( expected, received, msg='object created with Access-Control-Request-Headers header' ' value", "language governing permissions and limitations under the License. \"\"\" import", "expected=expected, received=str(received))) response = self.client.list_objects(object_history_container_name) method = 'list on versioned", "received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_detected_with_detect_content_type( self, object_type, generate_object): container_name", "object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method =", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_end_range( self, object_type, generate_object): container_name = self.create_temp_container(", "= self.default_obj_name object_headers = {'Access-Control-Request-Method': 'GET'} object_info = generate_object(container_name, object_name,", "method = 'object creation with etag header' expected = 201", "= 'object retrieval with if unmodified since header' expected =", "headers=headers, data=self.generate_chunk_data()) method = 'Object creation via chunked transfer' expected", "expected = 206 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "object_name = self.default_obj_name generate_object(container_name, object_name) updated_object_data = 'Updated test file", 
"@ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_controle_max_age( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def test_object_creation_with_uppercase_etag(self):", "object_data_op(data, extra_data): data = zlib.compress(data) return (data, extra_data) object_headers =", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_range( self, object_type, generate_object):", "container_name, self.default_obj_name, headers=headers) method = 'set object metadata' expected =", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {", "self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name", "'object creation with Access-Control-Max-Age header' expected = 201 received =", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_etag( self,", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response = self.client.get_object(container_name, self.default_obj_name)", "self.default_obj_name) self.assertIn( 'etag', object_response.headers, msg=\"Etag header was set\") expected =", "'/{0}/{1}'.format(src_container_name, src_object_name) hdrs = {'X-Copy-From': source, 'Content-Length': '0'} response =", "= self.client.get_object_metadata( container_name, object_name, headers=headers) self.assertIn( 'access-control-expose-headers', response.headers, 
msg=\"access-control-expose-headers header", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info = generate_object(container_name, object_name) response =", "data=object_data, headers=headers) method = 'object creation with uppercase etag header'", "@DataDrivenFixture class ObjectSmokeTest(ObjectStorageFixture): @classmethod def setUpClass(cls): super(ObjectSmokeTest, cls).setUpClass() cls.default_obj_name =", "ver1_info = generate_object(container_name, object_name) response = ver1_info.get('response') method = 'object", "Constants from cloudroast.objectstorage.fixtures import ObjectStorageFixture from cloudroast.objectstorage.generators import ( ObjectDatasetList,", "received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update(self, object_type, generate_object): container_name =", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_expose_headers( self, object_type,", "{'Access-Control-Request-Method': 'GET'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "response = object_info.get('response') method = 'object creation with etag header'", "@data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update(self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "self.client.get_object(container_name, object_name) method = 'object creation with valid object name'", "{'Range': 'bytes=5-'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method =", "= generate_object(container_name, object_name) response = ver1_info.get('response') method = 'object version", "received: {1}'.format(expected, received)) response = self.client.get_object( container_name, 
object2_name) self.assertEqual( expected,", "response.headers.get('Content-Encoding') self.assertEqual( expected, received, msg='object created with Content-Encoding header value'", "Constants.VALID_OBJECT_NAME_WITH_UNICODE @staticmethod def generate_chunk_data(): for i in range(10): yield \"Test", "18:44:42 GMT'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method =", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Request-Method': 'GET'} object_info", "if modified since header (future date)' expected = 304 received", "self.default_obj_name generate_object(container_name, object_name, headers={'X-Object-Meta-Grok': 'Drok'}) response = self.client.get_object_metadata( container_name, object_name)", "'0'} response = self.client.copy_object( dest_container_name, dest_obj_name, headers=hdrs) method = 'put", "def ddtest_versioned_container_creation_with_valid_data( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_history_container_name", "object_headers = {'Content-Encoding': 'gzip'} object_info = generate_object(container_name, object_name, data_op=object_data_op, headers=object_headers)", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Max-Age', response.headers, msg=\"Access-Control-Max-Age header", "received=str(received))) # Update an object (version 2) object_name = self.default_obj_name", "https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name obj_info", "(version 2) object_name = self.default_obj_name ver2_info = generate_object(container_name, object_name) response", "Unless required by applicable law or agreed to in writing,", "self.assertIn( 'Content-Encoding', response.headers, 
msg=\"Content-Encoding header was set\") expected = 'gzip'", "\"\"\" Scenario: Create an object using chunked transfer encoding. Expected", "with Access-Control-Max-Age header' expected = 201 received = response.status_code self.assertEqual(", "response = self.client.get_object_metadata( container_name, object_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object not", "with X-Object-Meta-Foo header\") expected = 'Bar' received = response.headers.get('X-Object-Meta-Foo') self.assertEqual(", "object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type': True} generate_object(container_name, object2_name, headers=object2_headers)", "dest_obj_name) method = 'copied object retrieval' expected = 200 received", "2001 18:44:42 GMT'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name headers = {'access-control-allow-origin': 'http://example.com', 'access-control-expose-headers':", "object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name def", "= 'object version one creation' expected = 201 received =", "express or implied. 
See the License for the specific language", "was set\") expected = 'http://example.com' received = response.headers.get('Access-Control-Allow-Origin') self.assertEqual( expected,", "def generate_chunk_data(): for i in range(10): yield \"Test chunk %s\\r\\n\"", "self.client.get_object( container_name, self.default_obj_name, headers=headers) method = ('object retrieval precondition fail", "@data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "was set\") expected = 'GET, POST, OPTIONS' received = response.headers.get('Access-Control-Allow-Methods')", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Expose-Headers',", "' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_delete_after( self,", "valid object name' expected = 200 received = response.status_code self.assertEqual(", "method = 'object version one creation' expected = 201 received", "%s\\r\\n\" % i @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_valid_object_name( self, object_type, generate_object): container_name", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_not_modified_with_if_modified_since( self, object_type,", "expected = 'Drok' received = response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object", "ddtest_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name = self.create_temp_container(", "def ddtest_object_retrieval_with_if_unmodified_since( self, object_type, generate_object): 
container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "ddtest_object_retrieval_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "received = response.headers.get('X-Delete-At') self.assertEqual( expected, received, msg='object created with X-Delete-At", "dest_container_name, dest_object_name) method = 'copied object retrieval' expected = 200", "header') expected = 412 received = response.status_code self.assertEqual( expected, received,", "self.default_obj_name generate_object(container_name, object_name) headers = {'If-Modified-Since': 'Fri, 17 Aug 2101", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_methods(", "expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_headers( self, object_type, generate_object): container_name", "with uppercase etag header' expected = 201 received = create_response.status_code", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_unmodified_since(", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "@data_driven_test(ObjectDatasetList()) def ddtest_content_type_not_detected_without_detect_content_type_header( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Max-Age', response.headers,", "expected=expected, 
received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Expose-Headers', response.headers,", "received=str(received))) object_response = self.client.get_object(container_name, self.default_obj_name) method = 'Object retrieval' expected", "descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_obj_name = '{0}_destination'.format(self.default_obj_name) source", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Content-Disposition': 'attachment;", "should be created. \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers =", "{'If-Unmodified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'} response = self.client.get_object(", "self.client.copy_object( dest_container_name, dest_obj_name, headers=hdrs) method = 'put copy object' expected", "src_object_name) dest_object_name = '{0}_destination'.format(self.default_obj_name) dest = '/{0}/{1}'.format(dest_container_name, dest_object_name) headers =", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Expose-Headers', response.headers, msg=\"Access-Control-Expose-Headers header was set\")", "headers=headers) self.assertIn( 'access-control-expose-headers', response.headers, msg=\"access-control-expose-headers header should be set\") self.assertIn(", "set\") @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning') def ddtest_versioned_container_creation_with_valid_data( self, object_type, generate_object): container_name =", "'bytes=99-0'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'partial", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = 
self.default_obj_name object_headers = {'Access-Control-Allow-Credentials':", "with Access-Control-Request-Headers header' expected = 201 received = response.status_code self.assertEqual(", "expected, received, msg='object created with X-Object-Meta-Grok header value' ' expected:", "@data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_end_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "@data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_fails_with_if_unmodified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "ddtest_object_creation_with_access_control_request_method( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "object_name, data_op=object_data_op, headers=object_headers) response = object_info.get('response') method = 'object creation", "= self.client.create_object( container_name, self.default_obj_name, headers=headers, data=updated_object_data) method = 'object update", "204 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_unmodified_since( self, object_type, generate_object):", "'Object creation via chunked transfer' expected = 201 received =", "received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_controle_max_age( self, object_type, generate_object): container_name =", "def ddtest_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name =", "= 
{'Content-Encoding': 'gzip'} object_info = generate_object(container_name, object_name, data_op=object_data_op, headers=object_headers) response", "expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_methods( self, object_type, generate_object): container_name", "X-Object-Meta-Grok header value expected: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList())", "cls).setUpClass() cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE @staticmethod def generate_chunk_data(): for i in", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) response = self.client.get_object(container_name,", "object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=99-0'} response", "container_name, self.default_obj_name) self.assertIn( 'etag', response.headers, msg=\"Etag header was set\") if", "= object_info.get('response') method = 'object creation with valid object name'", "generate_object(container_name, object_name, data_op=object_data_op, headers=object_headers) response = object_info.get('response') method = 'object", "{1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_x_delete_at( self, object_type, generate_object): container_name =", "ObjectDatasetList, CONTENT_TYPES) CONTAINER_DESCRIPTOR = 'object_smoke_test' STATUS_CODE_MSG = ('{method} expected status", "self.default_obj_name start_time = calendar.timegm(time.gmtime()) future_time = str(int(start_time + 60)) object_headers", "'Fri, 17 Aug 2001 18:44:42 GMT'} response = self.client.get_object( container_name,", "self.default_obj_name) method = 'object retrieval' expected = 200 received =", "CONTAINER_DESCRIPTOR = 'object_smoke_test' STATUS_CODE_MSG = ('{method} expected status 
code {expected}'", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_complete_range( self,", "{1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update(self, object_type, generate_object): container_name = self.create_temp_container(", "the specific language governing permissions and limitations under the License.", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name start_time = calendar.timegm(time.gmtime()) future_time = str(int(start_time", "= '/{0}/{1}'.format(dest_container_name, dest_object_name) headers = {'Destination': dest} response = self.client.copy_object(", "\"\"\" Copyright 2015 Rackspace Licensed under the Apache License, Version", "\"valid_data\" data_md5 = md5(object_data).hexdigest() upper_etag = data_md5.upper() headers = {\"ETag\":", "headers=object_headers) response = object_info.get('response') method = 'object creation with content", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Expose-Headers', response.headers, msg=\"Access-Control-Expose-Headers", "with etag header' expected = 201 received = response.status_code self.assertEqual(", "using chunked transfer encoding. 
Expected Results: Return a 201 status", "or agreed to in writing, software distributed under the License", "ddtest_object_creation_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response = self.client.get_object(container_name, self.default_obj_name) method =", "object_info.get('response') method = 'object creation with Access-Control-Request-Headers header' expected =", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name obj_info = generate_object(container_name,", "method=method, expected=expected, received=str(received))) response = self.client.get_object( container_name, self.default_obj_name) method =", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_copy_object(self, object_type, generate_object):", "'Drok'} response = self.client.set_object_metadata( container_name, object_name, headers=headers) method = 'set", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name)", "= 'true' received = response.headers.get('Access-Control-Allow-Credentials') self.assertEqual( expected, received, msg='object created", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Headers', response.headers, msg=\"Access-Control-Request-Headers header", "object_name) headers = {'Range': 'bytes=99-0'} response = self.client.get_object( container_name, self.default_obj_name,", "self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\") expected = future_time", "object_info.get('response') method = 'object 
creation with X-Delete-At header' expected =", "hdrs = {'X-Copy-From': source, 'Content-Length': '0'} response = self.client.copy_object( dest_container_name,", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_history_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers =", "descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers = {'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name,", "expected=expected, received=str(received))) response_md5 = md5(response.content).hexdigest() self.assertEqual( object_info.get('md5'), response_md5, msg='should return", "= self.default_obj_name object_info = generate_object(container_name, object_name) headers = {'If-None-Match': 'grok'}", "= self.default_obj_name object_info = generate_object(container_name, object_name) response = object_info.get('response') method", "headers=headers) # list objects in non-current container response = self.client.list_objects(", "uppercase etag header' expected = 201 received = create_response.status_code self.assertEqual(", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response = self.client.get_object( container_name, self.default_obj_name) self.assertIn(", "future_time} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "{received}') @DataDrivenFixture class ObjectSmokeTest(ObjectStorageFixture): @classmethod def setUpClass(cls): super(ObjectSmokeTest, cls).setUpClass() cls.default_obj_name", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "'object creation with Access-Control-Expose-Headers header' expected = 201 received =", "msg='object created should have content type: {0}' ' received: {1}'.format(expected,", "{'X-Detect-Content-Type': True} generate_object(container_name, 
object2_name, headers=object2_headers) response = self.client.get_object( container_name, object2_name)", "received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_creation_with_file_compression( self, object_type, generate_object):", "= 'object should be flagged as not modified' expected =", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_obj_name = '{0}_destination'.format(self.default_obj_name)", "'Drok' received = response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object created with", "received = create_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received)))", "was set\") expected = 'gzip' received = response.headers.get('Content-Encoding') self.assertEqual( expected,", "= object_info.get('response') method = 'object creation with Access-Control-Expose-Headers header' expected", "header' ' value expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo',", "def ddtest_object_retrieval_with_if_match( self, object_type, generate_object): \"\"\" Bug filed for dlo/slo", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata(", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Max-Age', response.headers, msg=\"Access-Control-Max-Age header was", "= 304 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def 
ddtest_partial_object_retrieval_with_start_range(", "with Access-Control-Allow-Credentials header' expected = 201 received = response.status_code self.assertEqual(", "expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_expose_headers( self, object_type, generate_object): container_name", "'text/plain' received = response.headers.get('content-type') self.assertEqual( expected, received, msg='object created should", "Access-Control-Max-Age header' ' value expected: {0} received: {1}'.format( expected, received))", "= 'object creation with etag header' expected = 201 received", "header was set\") expected = data_md5 received = object_response.headers.get('etag') self.assertEqual(", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.list_objects(object_history_container_name) method =", "import calendar import time import zlib from hashlib import md5", "generate_object(container_name, object_name) response = object_info.get('response') method = 'object creation with", "creation with Content-Encoding header' expected = 201 received = response.status_code", "= 'object creation with content disposition header' expected = 201", "extra_data) object_headers = {'Content-Encoding': 'gzip'} object_info = generate_object(container_name, object_name, data_op=object_data_op,", "status code {received}') @DataDrivenFixture class ObjectSmokeTest(ObjectStorageFixture): @classmethod def setUpClass(cls): super(ObjectSmokeTest,", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_modified_since( self, object_type, generate_object): container_name", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'If-Modified-Since':", 
"{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_origin( self, object_type, generate_object):", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Headers',", "object_name) updated_object_data = 'Updated test file data' updated_content_length = str(len(updated_object_data))", "@data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_start_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "= 404 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "should have content type: {0}' ' received: {1}'.format(expected, received)) response", "(past date)' expected = 200 received = response.status_code self.assertEqual( expected,", "def ddtest_object_not_modified_with_if_modified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_headers( self, object_type,", "self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_history_container_name = self.create_temp_container(", "dest = '/{0}/{1}'.format(dest_container_name, dest_object_name) headers = {'Destination': dest} response =", "class ObjectSmokeTest(ObjectStorageFixture): @classmethod def setUpClass(cls): super(ObjectSmokeTest, cls).setUpClass() cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE", "= 201 received = create_response.status_code 
self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_unmodified_since( self, object_type, generate_object): container_name", "headers=hdrs) method = 'put copy object' expected = 201 received", "method=method, expected=expected, received=str(received))) response = self.client.get_object( dest_container_name, dest_obj_name) method =", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Access-Control-Allow-Methods': 'GET,", "= self.client.get_object( dest_container_name, dest_object_name) method = 'copied object retrieval' expected", "a single object should be created. \"\"\" container_name = self.create_temp_container(", "expected = 200 received = object_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "response = self.client.set_object_metadata( container_name, object_name, headers=headers) method = 'set object", "method = 'copy object' expected = 201 received = response.status_code", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Origin', response.headers, msg=\"Access-Control-Allow-Origin header was", "{'Access-Control-Max-Age': '5'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Request-Method':", "generate_object(container_name, object_name) response = ver2_info.get('response') method = 'update version one", "metadata' expected = 202 received = response.status_code self.assertEqual( expected, received,", "'partial object retrieval with start and end range' expected =", "= generate_object(container_name, 
object_name) headers = {'If-None-Match': 'grok'} response = self.client.get_object(", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_none_match( self, object_type, generate_object):", "headers=object1_headers) response = self.client.get_object( container_name, object1_name) expected = 'text/plain' received", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_valid_object_name( self,", "creation with Access-Control-Request-Headers header' expected = 201 received = response.status_code", "= {'X-Versions-Location': object_history_container_name} self.client.set_container_metadata(container_name, headers=headers) # list objects in non-current", "response.headers, msg=\"Access-Control-Allow-Methods header was set\") expected = 'GET, POST, OPTIONS'", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Request-Headers': 'x-requested-with'} object_info", "ddtest_object_retrieval_with_if_modified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Disposition', response.headers, msg=\"Content-Disposition header", "object2_headers = {'X-Detect-Content-Type': True} generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object(", "object_info = generate_object(container_name, object_name, data_op=object_data_op, headers=object_headers) response = object_info.get('response') method", "method = 'object should be flagged as not modified' expected", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def 
ddtest_object_retrieval_with_if_modified_since( self, object_type, generate_object): container_name =", "method = 'object creation with Content-Encoding header' expected = 201", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Update an", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "yield \"Test chunk %s\\r\\n\" % i @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_valid_object_name( self,", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers = {'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name,", "received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_methods( self, object_type, generate_object): container_name =", "modified' expected = 304 received = response.status_code self.assertEqual( expected, received,", "'Fri, 17 Aug 2101 18:44:42 GMT'} response = self.client.get_object( container_name,", "self.client.delete_object( container_name, object_name) method = 'delete object' expected = 204", "was set\") expected = '5' received = response.headers.get('Access-Control-Max-Age') self.assertEqual( expected,", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_origin(", "method = 'object creation with Access-Control-Max-Age header' expected = 201", "<filename>cloudroast/objectstorage/smoke/object_smoke.py \"\"\" Copyright 2015 Rackspace Licensed under the Apache License,", "= {'Access-Control-Request-Method': 'GET'} object_info = generate_object(container_name, object_name, 
headers=object_headers) response =", "data = zlib.compress(data) return (data, extra_data) object_headers = {'Content-Encoding': 'gzip'}", "container_name, self.default_obj_name) self.assertIn( 'etag', object_response.headers, msg=\"Etag header was set\") expected", "'Content-Disposition', response.headers, msg=\"Content-Disposition header was set\") expected = 'attachment; filename=testdata.txt'", "'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type':", "'GET' received = response.headers.get('Access-Control-Request-Method') self.assertEqual( expected, received, msg='object created with", "object_info.get('response') method = 'object creation with valid object name' expected", "expected status code {expected}' ' received status code {received}') @DataDrivenFixture", "self.client.get_object( container_name, object2_name) self.assertEqual( expected, received, msg='object created should have", "= 'x-requested-with' received = response.headers.get('Access-Control-Request-Headers') self.assertEqual( expected, received, msg='object created", "response = self.client.list_objects( object_history_container_name) method = 'list on empty versioned", "object (version 1) object_name = self.default_obj_name ver1_info = generate_object(container_name, object_name)", "object_name = self.default_obj_name object_headers = { 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS'}", "method = 'object retrieval with if modified since header (past", "method = 'object retrieval' expected = 404 received = response.status_code", "creation with valid object name' expected = 200 received =", "response = object_info.get('response') method = 'object creation with Access-Control-Request-Method header'", "self.default_obj_name object_headers = {'X-Delete-After': '60'} object_info = generate_object(container_name, object_name, headers=object_headers)", "= 
self.default_obj_name generate_object(container_name, object_name) headers = {'If-Modified-Since': 'Fri, 17 Aug", "201 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info = generate_object(container_name, object_name) headers", "generate_object(container_name, object_name) headers = {'Range': 'bytes=5-8'} response = self.client.get_object( container_name,", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Encoding',", "creation with X-Delete-At header' expected = 201 received = response.status_code", "response_md5 = md5(response.content).hexdigest() self.assertEqual( object_info.get('md5'), response_md5, msg='should return identical object')", "test_object_creation_via_chunked_transfer(self): \"\"\" Scenario: Create an object using chunked transfer encoding.", "= self.default_obj_name object_headers = { 'Content-Disposition': 'attachment; filename=testdata.txt'} object_info =", "2015 Rackspace Licensed under the Apache License, Version 2.0 (the", "'Content-Type': CONTENT_TYPES.get('text')} response = self.client.create_object( container_name, self.default_obj_name, headers=headers, data=updated_object_data) method", "self.assertIn( 'Content-Disposition', response.headers, msg=\"Content-Disposition header was set\") expected = 'attachment;", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_headers( self,", "object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=-4'} response", "received=str(received))) response = self.client.get_object_metadata( container_name, 
self.default_obj_name) self.assertIn( 'Access-Control-Allow-Methods', response.headers, msg=\"Access-Control-Allow-Methods", "if modified since header (past date)' expected = 200 received", "expected, received, msg='access-control-allow-origin header should reflect origin' ' expected: {0}", "governing permissions and limitations under the License. \"\"\" import calendar", "'gzip'} object_info = generate_object(container_name, object_name, data_op=object_data_op, headers=object_headers) response = object_info.get('response')", "def ddtest_object_creation_with_file_compression( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "expected, received, msg='object created with X-Delete-At header value' ' expected:", "self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\") @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning') def", "self.default_obj_name object_headers = { 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS'} object_info =", "received)) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update(self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "True, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) response = self.client.get_object( container_name,", "{\"Transfer-Encoding\": \"chunked\"} create_response = self.client.create_object( container_name, self.default_obj_name, headers=headers, data=self.generate_chunk_data()) method", "self.assertIn( 'Access-Control-Max-Age', response.headers, msg=\"Access-Control-Max-Age header was set\") expected = '5'", "method = 'object creation with Access-Control-Allow-Credentials header' expected = 201", "unittest from cafe.drivers.unittest.decorators import ( DataDrivenFixture, data_driven_test) from 
cloudcafe.objectstorage.objectstorage_api.common.constants import", "received, msg='object created with Etag header' ' value expected: {0}", "{ 'Content-Disposition': 'attachment; filename=testdata.txt'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "ddtest_partial_object_retrieval_with_complete_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "this tests assertion, needs review') @data_driven_test(ObjectDatasetList()) def ddtest_put_copy_object(self, object_type, generate_object):", "expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_controle_max_age( self, object_type, generate_object): container_name", "'object should be flagged as not modified' expected = 304", "= '{0}_destination'.format(self.default_obj_name) source = '/{0}/{1}'.format(src_container_name, src_object_name) hdrs = {'X-Copy-From': source,", "retrieval' expected = 404 received = response.status_code self.assertEqual( expected, received,", "response.headers.get('X-Delete-At') self.assertEqual( expected, received, msg='object created with X-Delete-At header value'", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name,", "under the Apache License, Version 2.0 (the \"License\"); you may", "object' expected = 204 received = response.status_code self.assertEqual( expected, received,", "def ddtest_partial_object_retrieval_with_complete_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "'x-requested-with'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "updated_object_data = 'Updated test 
file data' updated_content_length = str(len(updated_object_data)) headers", "= 'set object metadata X-Object-Meta-Grok: Drok' expected = 202 received", "{ 'Access-Control-Allow-Origin': 'http://example.com'} object_info = generate_object(container_name, object_name, headers=object_headers) response =", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "received=str(received))) object_response = self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', object_response.headers, msg=\"Etag", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def test_object_creation_with_uppercase_etag(self): container_name", "empty versioned container' expected = 204 received = response.status_code self.assertEqual(", "response = object_info.get('response') method = 'object creation with Access-Control-Allow-Methods header'", "201 status code and a single object should be created.", "'5' received = response.headers.get('Access-Control-Max-Age') self.assertEqual( expected, received, msg='object created with", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Method', response.headers, msg=\"Access-Control-Request-Method header was set\")", "= 'GET, POST, OPTIONS' received = response.headers.get('Access-Control-Allow-Methods') self.assertEqual( expected, received,", "creation with valid object name' expected = 201 received =", "@data_driven_test(ObjectDatasetList()) def ddtest_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name", "= 'X-Foo-Header' received = response.headers.get('Access-Control-Expose-Headers') self.assertEqual( expected, received, msg='object created", "'slo'])) def ddtest_object_update_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( 
descriptor=CONTAINER_DESCRIPTOR)", "msg=\"object updated with X-Object-Meta-Grok header\") expected = 'Drok' received =", "Return a 201 status code and a single object should", "= ver1_info.get('response') method = 'object version one creation' expected =", "for dlo/slo support of If-match Header: https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name =", "{1}'.format(expected, received)) object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type': True} generate_object(container_name,", "expected = 'http://example.com' received = response.headers.get('access-control-allow-origin') self.assertEqual( expected, received, msg='access-control-allow-origin", "in writing, software distributed under the License is distributed on", "def ddtest_put_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name =", "required by applicable law or agreed to in writing, software", "'object retrieval' expected = 404 received = response.status_code self.assertEqual( expected,", "self.default_obj_name) self.assertIn( 'Access-Control-Request-Method', response.headers, msg=\"Access-Control-Request-Method header was set\") expected =", "{1}'.format(expected, received)) def test_object_creation_via_chunked_transfer(self): \"\"\" Scenario: Create an object using", "self.client.get_object( container_name, self.default_obj_name) method = 'object retrieval' expected = 200", "object_info.get('response') method = 'object creation with Access-Control-Allow-Methods header' expected =", "headers = {'X-Object-Meta-Foo': 'Bar'} response = self.client.set_object_metadata( container_name, self.default_obj_name, headers=headers)", "generate_chunk_data(): for i in range(10): yield \"Test chunk %s\\r\\n\" %", "expected=expected, received=str(received))) response = self.client.get_object( container_name, self.default_obj_name) method = 'object", "' value expected: {0} received: {1}'.format( 
expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors')", "creation with content disposition header' expected = 201 received =", "origin' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def", "zlib from hashlib import md5 import unittest from cafe.drivers.unittest.decorators import", "msg='object created with Access-Control-Expose-Headers header' ' value expected: {0} received:", "self.default_obj_name, headers=headers) method = 'object retrieval with if match header'", "created with Access-Control-Allow-Origin header' ' value expected: {0} received: {1}'.format(", "msg=\"Access-Control-Expose-Headers header was set\") expected = 'X-Foo-Header' received = response.headers.get('Access-Control-Expose-Headers')", "distributed under the License is distributed on an \"AS IS\"", "created with Content-Encoding header value' ' expected: {0} received: {1}'.format(expected,", "'partial object retrieval with end range' expected = 206 received", "was set\") expected = data_md5 received = object_response.headers.get('etag') self.assertEqual( expected,", "header was set\") expected = 'attachment; filename=testdata.txt' received = response.headers.get('Content-Disposition')", "created with Content-Disposition header value' ' expected: {0} received: {1}'.format(expected,", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "self.client.set_object_metadata( container_name, self.default_obj_name, headers=headers) method = 'set object metadata' expected", "= md5(response.content).hexdigest() self.assertEqual( object_info.get('md5'), response_md5, msg='should return identical object') @data_driven_test(ObjectDatasetList(exclude=['dlo',", "received = response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object X-Object-Meta-Grok header value", "headers=headers) method = 'partial object retrieval with complete range' expected", "container_name, self.default_obj_name) method = 'object retrieval' expected = 200 received", "= 'object creation with Access-Control-Expose-Headers header' expected = 201 received", "60)) object_headers = {'X-Delete-At': future_time} object_info = generate_object(container_name, object_name, headers=object_headers)", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_end_range( self, object_type, generate_object): container_name =", "= { 'Access-Control-Allow-Origin': 'http://example.com'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers = {'X-Detect-Content-Type': True,", "an object using chunked transfer encoding. 
Expected Results: Return a", "response.headers, msg=\"access-control-allow-origin header should be set\") expected = 'http://example.com' received", "response.headers.get('access-control-allow-origin') self.assertEqual( expected, received, msg='access-control-allow-origin header should reflect origin' '", "Access-Control-Expose-Headers header' expected = 201 received = response.status_code self.assertEqual( expected,", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Request-Method': 'GET'}", "expected = data_md5 received = object_response.headers.get('etag') self.assertEqual( expected, received, msg='object", "created with Access-Control-Request-Headers header' ' value expected: {0} received: {1}'.format(", "https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info", "method = 'partial object retrieval with start range' expected =", "@data_driven_test(ObjectDatasetList()) def ddtest_put_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_history_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {'X-Versions-Location':", "{'Access-Control-Request-Headers': 'x-requested-with'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) response = self.client.get_object(", "received, msg='object created with Access-Control-Max-Age header' ' value expected: {0}", "= generate_object(container_name, object_name, data_op=object_data_op, headers=object_headers) response = 
object_info.get('response') method =", "self.default_obj_name, headers=headers) method = 'object retrieval with if unmodified since", "= {'Access-Control-Allow-Credentials': 'true'} object_info = generate_object(container_name, object_name, headers=object_headers) response =", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_range( self, object_type, generate_object): container_name =", "= {'X-Detect-Content-Type': True, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) response =", "code {expected}' ' received status code {received}') @DataDrivenFixture class ObjectSmokeTest(ObjectStorageFixture):", "{0} received: {1}'.format(expected, received)) headers = {'X-Object-Meta-Foo': 'Bar'} response =", "msg='access-control-allow-origin header should reflect origin' ' expected: {0} received: {1}'.format(expected,", "with this tests assertion, needs review') @data_driven_test(ObjectDatasetList()) def ddtest_put_copy_object(self, object_type,", "= object_info.get('etag') else: expected = '\"{0}\"'.format(object_info.get('etag')) received = response.headers.get('etag') self.assertEqual(", "expected = 201 received = create_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "object_name) response = ver1_info.get('response') method = 'object version one creation'", "self.client.get_object( container_name, self.default_obj_name) method = 'object retrieval' expected = 404", "with Access-Control-Allow-Credentials header' ' value expected: {0} received: {1}'.format( expected,", "method = 'object creation with X-Delete-After header' expected = 201", "src_object_name, headers=headers) method = 'copy object' expected = 201 received", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) 
def ddtest_object_deletion_with_valid_object(", "response = object_info.get('response') method = 'object creation with content disposition", "set\") expected = 'X-Foo-Header' received = response.headers.get('Access-Control-Expose-Headers') self.assertEqual( expected, received,", "expected = '5' received = response.headers.get('Access-Control-Max-Age') self.assertEqual( expected, received, msg='object", "True} generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object( container_name, object2_name) expected", "= {'X-Detect-Content-Type': True} generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object( container_name,", "object_name) method = 'delete object' expected = 204 received =", "= self.client.get_object(container_name, self.default_obj_name) method = 'Object retrieval' expected = 200", "headers = {'Range': 'bytes=99-0'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "= self.default_obj_name object_data = \"valid_data\" data_md5 = md5(object_data).hexdigest() upper_etag =", "object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name headers", "with X-Delete-After header' expected = 201 received = response.status_code self.assertEqual(", "a 201 status code and a single object should be", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_credentials( self,", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.list_objects(object_history_container_name)", "= response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) 
response", "self.default_obj_name, headers=headers) method = 'partial object retrieval with complete range'", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "OPTIONS' received = response.headers.get('Access-Control-Allow-Methods') self.assertEqual( expected, received, msg='object created with", "'http://example.com' received = response.headers.get('access-control-allow-origin') self.assertEqual( expected, received, msg='access-control-allow-origin header should", "{'X-Copy-From': source, 'Content-Length': '0'} response = self.client.copy_object( dest_container_name, dest_obj_name, headers=hdrs)", "= response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object created with X-Object-Meta-Grok header", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name def object_data_op(data, extra_data): data", "Rackspace Licensed under the Apache License, Version 2.0 (the \"License\");", "import ObjectStorageFixture from cloudroast.objectstorage.generators import ( ObjectDatasetList, CONTENT_TYPES) CONTAINER_DESCRIPTOR =", "= {'Origin': 'http://example.com'} response = self.client.get_object_metadata( container_name, object_name, headers=headers) self.assertIn(", "one object' expected = 201 received = response.status_code self.assertEqual( expected,", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) response", "= self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=99-0'} response =", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_deletion_with_valid_object( self, object_type, generate_object): container_name", "@data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_content_disposition( self, object_type, 
generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "self.default_obj_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object updated with X-Object-Meta-Grok header\") expected", "since header (future date)' expected = 304 received = response.status_code", "chunked transfer' expected = 201 received = create_response.status_code self.assertEqual( expected,", "self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=5-8'} response = self.client.get_object(", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info = generate_object(container_name,", "@ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_method( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name headers = {'access-control-allow-origin': 'http://example.com', 'access-control-expose-headers': 'X-Trans-Id'}", "dest_obj_name, headers=hdrs) method = 'put copy object' expected = 201", "'Content-Length': '0'} response = self.client.copy_object( dest_container_name, dest_obj_name, headers=hdrs) method =", "object_name = self.default_obj_name object_info = generate_object(container_name, object_name) response = object_info.get('response')", "CONTENT_TYPES) CONTAINER_DESCRIPTOR = 'object_smoke_test' STATUS_CODE_MSG = ('{method} expected status code", "container_name, self.default_obj_name) self.assertIn( 'Content-Encoding', response.headers, msg=\"Content-Encoding header was set\") expected", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_end_range( self, object_type, generate_object): container_name", "= 'object creation with Access-Control-Allow-Origin header' 
expected = 201 received", "object_response = self.client.get_object(container_name, self.default_obj_name) method = 'Object retrieval' expected =", "'true' received = response.headers.get('Access-Control-Allow-Credentials') self.assertEqual( expected, received, msg='object created with", "received=str(received))) response = self.client.get_object( dest_container_name, dest_obj_name) method = 'copied object", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_complete_range( self, object_type, generate_object): container_name = self.create_temp_container(", "object1_name, headers=object1_headers) response = self.client.get_object( container_name, object1_name) expected = 'text/plain'", "headers=object_headers) response = object_info.get('response') method = 'object creation with Access-Control-Request-Method", "= 201 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "headers=headers) headers = {'Origin': 'http://example.com'} response = self.client.get_object_metadata( container_name, object_name,", "object_name = self.default_obj_name generate_object(container_name, object_name) response = self.client.get_object(container_name, object_name) method", "import zlib from hashlib import md5 import unittest from cafe.drivers.unittest.decorators", "object_history_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {'X-Versions-Location': object_history_container_name} self.client.set_container_metadata(container_name, headers=headers)", "method = 'Object retrieval' expected = 200 received = object_response.status_code", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name headers = {'access-control-allow-origin': 'http://example.com',", "import md5 import unittest from cafe.drivers.unittest.decorators import ( DataDrivenFixture, data_driven_test)", "expected=expected, 
received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_deletion_with_valid_object( self, object_type, generate_object): container_name =", "= '\"{0}\"'.format(object_info.get('etag')) received = response.headers.get('etag') self.assertEqual( expected, received, msg='object created", "may not use this file except in compliance with the", "{'If-Unmodified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'} response = self.client.get_object(", "X-Delete-At header' expected = 201 received = response.status_code self.assertEqual( expected,", "data_op=object_data_op, headers=object_headers) response = object_info.get('response') method = 'object creation with", "Bug filed for dlo/slo support of If-match Header: https://bugs.launchpad.net/swift/+bug/1279076 \"\"\"", "headers=headers) method = 'partial object retrieval with start range' expected", "container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Origin', response.headers, msg=\"Access-Control-Allow-Origin header was set\") expected", "+ 60)) object_headers = {'X-Delete-At': future_time} object_info = generate_object(container_name, object_name,", "from cloudcafe.objectstorage.objectstorage_api.common.constants import \\ Constants from cloudroast.objectstorage.fixtures import ObjectStorageFixture from", "received=str(received))) response_md5 = md5(response.content).hexdigest() self.assertEqual( object_info.get('md5'), response_md5, msg='should return identical", "received, msg='object created with Access-Control-Request-Headers header' ' value expected: {0}", "container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Method', response.headers, msg=\"Access-Control-Request-Method header was set\") expected", "= response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) #", "'copied object retrieval' expected = 200 received = response.status_code self.assertEqual(", "received, 
msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response_md5 = md5(response.content).hexdigest() self.assertEqual( object_info.get('md5'),", "create_response = self.client.create_object( container_name, self.default_obj_name, headers=headers, data=self.generate_chunk_data()) method = 'Object", "an object (version 1) object_name = self.default_obj_name ver1_info = generate_object(container_name,", "'object1.txt' object1_headers = {'X-Detect-Content-Type': True, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers)", "generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers =", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_expose_headers(", "@data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_x_delete_at( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "'X-Object-Meta-Grok', response.headers, msg=\"object not created with X-Object-Meta-Grok header\") expected =", "received, msg='object X-Object-Meta-Foo header value expected: {0}' ' received: {1}'.format(expected,", "{'If-Modified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'} response = self.client.get_object(", "= self.default_obj_name object_headers = { 'Access-Control-Allow-Origin': 'http://example.com'} object_info = generate_object(container_name,", "generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object( container_name, object1_name) expected =", "msg=\"Access-Control-Request-Method header was set\") expected = 'GET' received = response.headers.get('Access-Control-Request-Method')", "received=str(received))) response = 
self.client.list_objects(object_history_container_name) method = 'list on versioned container'", "self.assertIn( 'access-control-allow-origin', response.headers, msg=\"access-control-allow-origin header should be set\") expected =", "method = 'partial object retrieval with end range' expected =", "= response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) headers", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_not_modified_with_if_modified_since( self, object_type, generate_object): container_name", "'Updated test file data' updated_content_length = str(len(updated_object_data)) headers = {'Content-Length':", "headers=headers) method = 'object retrieval with if none match header'", "received: {1}'.format(expected, received)) object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type': True}", "= generate_object(container_name, object_name) response = object_info.get('response') method = 'object creation", "method = 'object creation with Access-Control-Expose-Headers header' expected = 201", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Disposition', response.headers, msg=\"Content-Disposition header was", "object_name) headers = {'X-Object-Meta-Grok': 'Drok'} response = self.client.set_object_metadata( container_name, object_name,", "response.headers, msg=\"X-Delete-At header was set\") expected = future_time received =", "container_name, object_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object not created with X-Object-Meta-Grok", "method = 'partial object retrieval with start and end range'", "generate_object(container_name, object_name) headers = {'If-Modified-Since': 'Fri, 17 Aug 2101 18:44:42", "Access-Control-Request-Method header' ' value expected: {0} received: {1}'.format( expected, received))", "Header: 
https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info = generate_object(container_name, object_name)", "src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_obj_name = '{0}_destination'.format(self.default_obj_name) source =", "retrieval with complete range' expected = 200 received = response.status_code", "encoding. Expected Results: Return a 201 status code and a", "agreed to in writing, software distributed under the License is", "response.headers, msg=\"Content-Disposition header was set\") expected = 'attachment; filename=testdata.txt' received", "generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info =", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Credentials', response.headers, msg=\"Access-Control-Allow-Credentials header was set\")", "received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_headers( self, object_type, generate_object): container_name =", "header was set\") expected = 'x-requested-with' received = response.headers.get('Access-Control-Request-Headers') self.assertEqual(", "{0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_x_delete_at( self, object_type, generate_object):", "import unittest from cafe.drivers.unittest.decorators import ( DataDrivenFixture, data_driven_test) from cloudcafe.objectstorage.objectstorage_api.common.constants", "'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, 
headers=object1_headers) response = self.client.get_object( container_name, object1_name)", "str(int(start_time + 60)) object_headers = {'X-Delete-At': future_time} object_info = generate_object(container_name,", "= '/{0}/{1}'.format(src_container_name, src_object_name) hdrs = {'X-Copy-From': source, 'Content-Length': '0'} response", "{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_expose_headers( self, object_type, generate_object):", "received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_detected_with_detect_content_type( self, object_type, generate_object): container_name = self.create_temp_container(", "creation with uppercase etag header' expected = 201 received =", "@data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_unmodified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "creation via chunked transfer' expected = 201 received = create_response.status_code", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Expose-Headers', response.headers, msg=\"Access-Control-Expose-Headers header", "expected = 'text/plain' received = response.headers.get('content-type') self.assertEqual( expected, received, msg='object", "X-Delete-After header' expected = 201 received = response.status_code self.assertEqual( expected,", "msg=\"Access-Control-Allow-Origin header was set\") expected = 'http://example.com' received = response.headers.get('Access-Control-Allow-Origin')", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) 
@ObjectStorageFixture.required_features('object-cors') def ddtest_object_retrieval_with_origin(", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "response = ver1_info.get('response') method = 'object version one creation' expected", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name)", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_expose_headers( self,", "@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_none_match( self, object_type, generate_object): \"\"\" Bug filed", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_range( self,", "versioned container' expected = 200 received = response.status_code self.assertEqual( expected,", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_credentials(", "object_name) headers = {'If-None-Match': 'grok'} response = self.client.get_object( container_name, self.default_obj_name,", "= object_response.headers.get('etag') self.assertEqual( expected, received, msg='object created with Etag header'", "container_name, self.default_obj_name) self.assertIn( 'Content-Disposition', response.headers, msg=\"Content-Disposition header was set\") expected", "Access-Control-Max-Age header' expected = 201 received = response.status_code self.assertEqual( expected,", "dest_object_name) method = 'copied object retrieval' 
expected = 200 received", "def ddtest_object_retrieval_with_if_modified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "{'X-Detect-Content-Type': True, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) response = self.client.get_object(", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_unmodified_since( self, object_type, generate_object): container_name = self.create_temp_container(", "= object_info.get('response') method = 'object creation with content disposition header'", "descriptor=CONTAINER_DESCRIPTOR) dest_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name)", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Max-Age': '5'} object_info", "method = 'update version one object' expected = 201 received", "= response.headers.get('Access-Control-Request-Headers') self.assertEqual( expected, received, msg='object created with Access-Control-Request-Headers header'", "method = 'object creation with content disposition header' expected =", "self.client.copy_object( src_container_name, src_object_name, headers=headers) method = 'copy object' expected =", "was set\") expected = 'X-Foo-Header' received = response.headers.get('Access-Control-Expose-Headers') self.assertEqual( expected,", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_methods( self, object_type,", "msg=\"Etag header was set\") expected = data_md5 received = object_response.headers.get('etag')", "@data_driven_test(ObjectDatasetList()) 
def ddtest_partial_object_retrieval_with_complete_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "value' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_x_delete_at(", "for i in range(10): yield \"Test chunk %s\\r\\n\" % i", "response.headers, msg=\"object updated with X-Object-Meta-Grok header\") expected = 'Drok' received", "object retrieval with start range' expected = 206 received =", "retrieval with if modified since header (past date)' expected =", "container response = self.client.list_objects( object_history_container_name) method = 'list on empty", "@classmethod def setUpClass(cls): super(ObjectSmokeTest, cls).setUpClass() cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE @staticmethod def", "= create_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response", "= future_time received = response.headers.get('X-Delete-At') self.assertEqual( expected, received, msg='object created", "= 'Bar' received = response.headers.get('X-Object-Meta-Foo') self.assertEqual( expected, received, msg='object X-Object-Meta-Foo", "response.headers.get('Access-Control-Request-Method') self.assertEqual( expected, received, msg='object created with Access-Control-Request-Method header' '", "self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=-4'} response = self.client.get_object(", "method = 'object creation with Access-Control-Request-Headers header' expected = 201", "container_name, self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\") @data_driven_test(ObjectDatasetList())", "= object_info.get('response') method = 'object creation with X-Delete-After header' expected", "method = 'object retrieval with if match 
header' expected =", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update_with_object_possessing_metadata( self, object_type,", "descriptor=CONTAINER_DESCRIPTOR) headers = {'X-Versions-Location': object_history_container_name} self.client.set_container_metadata(container_name, headers=headers) # list objects", "with if unmodified' ' since header') expected = 412 received", "compliance with the License. You may obtain a copy of", "= 412 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "response.headers.get('Access-Control-Max-Age') self.assertEqual( expected, received, msg='object created with Access-Control-Max-Age header' '", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) updated_object_data = 'Updated", "= response.headers.get('access-control-allow-origin') self.assertEqual( expected, received, msg='access-control-allow-origin header should reflect origin'", "retrieval precondition fail with if unmodified' ' since header') expected", "= str(int(start_time + 60)) object_headers = {'X-Delete-At': future_time} object_info =", "expected=expected, received=str(received))) object_response = self.client.get_object(container_name, self.default_obj_name) method = 'Object retrieval'", "object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name,", "self.assertEqual( expected, received, msg='object created with Access-Control-Max-Age header' ' value", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name def object_data_op(data, extra_data): data =", "method = 'delete object' expected = 204 received = response.status_code", 
"received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Origin', response.headers, msg=\"Access-Control-Allow-Origin", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_headers( self, object_type, generate_object): container_name = self.create_temp_container(", "expected, received, msg='object created with Etag header' ' value expected:", "single object should be created. \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "retrieval with if match header' expected = 200 received =", "with Access-Control-Request-Headers header' ' value expected: {0} received: {1}'.format( expected,", "object_name = self.default_obj_name ver2_info = generate_object(container_name, object_name) response = ver2_info.get('response')", "{'Destination': dest} response = self.client.copy_object( src_container_name, src_object_name, headers=headers) method =", "object_type == 'standard': expected = object_info.get('etag') else: expected = '\"{0}\"'.format(object_info.get('etag'))", "{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_headers( self, object_type, generate_object):", "container_name, self.default_obj_name, headers=headers, data=updated_object_data) method = 'object update with valid", "method=method, expected=expected, received=str(received))) object_response = self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag',", "self.assertIn( 'Access-Control-Request-Headers', response.headers, msg=\"Access-Control-Request-Headers header was set\") expected = 'x-requested-with'", "17 Aug 2001 18:44:42 GMT'} response = self.client.get_object( container_name, self.default_obj_name,", "# Update an object 
(version 2) object_name = self.default_obj_name ver2_info", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_method( self,", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) response =", "{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_method( self, object_type, generate_object):", "headers = {'Content-Length': updated_content_length, 'Content-Type': CONTENT_TYPES.get('text')} response = self.client.create_object( container_name,", "self.default_obj_name, headers=headers) method = 'object should be flagged as not", "container_name, object_name, headers=headers) method = 'set object metadata X-Object-Meta-Grok: Drok'", "expected: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_not_detected_without_detect_content_type_header( self,", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Methods', response.headers,", "ddtest_partial_object_retrieval_with_start_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "{'access-control-allow-origin': 'http://example.com', 'access-control-expose-headers': 'X-Trans-Id'} generate_object(container_name, object_name, headers=headers) headers = {'Origin':", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_end_range(", "method = 'object retrieval' 
expected = 200 received = response.status_code", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Encoding', response.headers,", "headers=headers) method = 'object retrieval with if modified since header", "{0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_detected_with_detect_content_type( self, object_type,", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_match( self, object_type, generate_object):", "with complete range' expected = 200 received = response.status_code self.assertEqual(", "object_name, headers=headers) method = 'set object metadata X-Object-Meta-Grok: Drok' expected", "expected = 'application/x-www-form-urlencoded' received = response.headers.get('content-type') self.assertEqual( expected, received, msg='object", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Access-Control-Allow-Origin':", "ddtest_object_retrieval_fails_with_if_unmodified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "response.headers.get('etag') self.assertEqual( expected, received, msg='object created with Etag header' '", "created. 
\"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {\"Transfer-Encoding\": \"chunked\"}", "'60'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "def ddtest_object_retrieval_with_origin( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Headers', response.headers, msg=\"Access-Control-Request-Headers header was set\") expected", "received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_creation_with_file_compression( self, object_type, generate_object): container_name =", "ddtest_content_type_detected_with_detect_content_type( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name =", "= response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo',", "received, msg='object created with X-Object-Meta-Grok header value' ' expected: {0}", "headers = {'If-Match': obj_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "container_name, self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\") expected", "header value' ' expected: {0} received: {1}'.format(expected, received)) headers =", "set\") expected = 'GET, POST, OPTIONS' received = response.headers.get('Access-Control-Allow-Methods') self.assertEqual(", "response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object should", "= ('{method} expected status code {expected}' ' received status code", "object1_name = 'object1.txt' object1_headers = 
{'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers)", "expected = future_time received = response.headers.get('X-Delete-At') self.assertEqual( expected, received, msg='object", "msg='object created with Access-Control-Request-Method header' ' value expected: {0} received:", "self.default_obj_name generate_object(container_name, object_name) headers = {'X-Object-Meta-Grok': 'Drok'} response = self.client.set_object_metadata(", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) headers = {'If-None-Match': object_info.get('etag')} response", "= object_info.get('response') method = 'object creation with X-Delete-At header' expected", "generate_object(container_name, object_name) headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2001 18:44:42", "object_name, data=object_data, headers=headers) method = 'object creation with uppercase etag", "expected, received, msg='object created with Access-Control-Max-Age header' ' value expected:", "self.default_obj_name) self.assertIn( 'etag', response.headers, msg=\"Etag header was set\") if object_type", "expected, received, msg='object created with Access-Control-Allow-Origin header' ' value expected:", "response.headers, msg=\"X-Delete-At header was set\") @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning') def ddtest_versioned_container_creation_with_valid_data( self,", "ddtest_partial_object_retrieval_with_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "received = response.headers.get('content-type') self.assertEqual( expected, received, msg='object created should have", "{1}'.format(expected, received)) response = self.client.get_object( container_name, object2_name) self.assertEqual( expected, received,", "= 'Drok' received = response.headers.get('X-Object-Meta-Grok') 
self.assertEqual( expected, received, msg='object X-Object-Meta-Grok", "received = response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object created with X-Object-Meta-Grok", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers = {'X-Detect-Content-Type': True, 'Content-Type':", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object( dest_container_name, dest_obj_name) method", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_not_modified_with_if_modified_since( self, object_type, generate_object): container_name =", "{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_methods( self, object_type, generate_object):", "status code {expected}' ' received status code {received}') @DataDrivenFixture class", "= response.headers.get('Content-Encoding') self.assertEqual( expected, received, msg='object created with Content-Encoding header", "object_name = self.default_obj_name ver1_info = generate_object(container_name, object_name) response = ver1_info.get('response')", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Methods', response.headers, msg=\"Access-Control-Allow-Methods header was set\")", "header' expected = 201 received = response.status_code self.assertEqual( expected, received,", "modified since header (past date)' expected = 200 received =", "'object1.txt' object1_headers = {'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) object2_name =", "= {'If-None-Match': 'grok'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", 
"Access-Control-Allow-Methods header' ' value expected: {0} received: {1}'.format( expected, received))", "response.headers, msg=\"Access-Control-Allow-Credentials header was set\") expected = 'true' received =", "response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'partial object", "with X-Object-Meta-Grok header\") expected = 'Drok' received = response.headers.get('X-Object-Meta-Grok') self.assertEqual(", "'object creation with etag header' expected = 201 received =", "'object update with valid object name' expected = 201 received", "dest_object_name = '{0}_destination'.format(self.default_obj_name) dest = '/{0}/{1}'.format(dest_container_name, dest_object_name) headers = {'Destination':", "container_name, self.default_obj_name, headers=headers) method = 'object retrieval with if modified", "method = 'object creation with X-Delete-At header' expected = 201", "container_name, self.default_obj_name, headers=headers, data=self.generate_chunk_data()) method = 'Object creation via chunked", "self.assertIn( 'Access-Control-Allow-Methods', response.headers, msg=\"Access-Control-Allow-Methods header was set\") expected = 'GET,", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object( container_name,", "= 'Drok' received = response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object created", "'application/x-www-form-urlencoded' received = response.headers.get('content-type') self.assertEqual( expected, received, msg='object created should", "headers=object2_headers) response = self.client.get_object( container_name, object2_name) expected = 'text/plain' received", "self.default_obj_name) self.assertIn( 'Access-Control-Expose-Headers', response.headers, msg=\"Access-Control-Expose-Headers header was set\") expected =", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name 
= self.default_obj_name generate_object(container_name, object_name) response", "{'X-Object-Meta-Foo': 'Bar'} response = self.client.set_object_metadata( container_name, self.default_obj_name, headers=headers) method =", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_fails_with_if_unmodified_since( self, object_type, generate_object):", "response.headers, msg=\"Access-Control-Request-Headers header was set\") expected = 'x-requested-with' received =", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update_with_object_possessing_metadata( self,", "def ddtest_content_type_detected_with_detect_content_type( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name", "creation with Access-Control-Expose-Headers header' expected = 201 received = response.status_code", "src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_object_name = '{0}_destination'.format(self.default_obj_name) dest =", "= self.default_obj_name generate_object(container_name, object_name) response = self.client.delete_object( container_name, object_name) method", "since header (past date)' expected = 200 received = response.status_code", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name headers = {'access-control-allow-origin':", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Access-Control-Allow-Origin': 'http://example.com'} object_info", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_data = \"valid_data\" data_md5 = md5(object_data).hexdigest()", "self.client.set_container_metadata(container_name, 
headers=headers) # list objects in non-current container response =", "object_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object", "def ddtest_obj_metadata_update(self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Encoding', response.headers, msg=\"Content-Encoding header", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_fails_with_if_unmodified_since( self,", "response = self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', response.headers, msg=\"Etag header", "POST, OPTIONS'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "= self.client.get_object( container_name, self.default_obj_name) method = 'object retrieval' expected =", "set\") expected = 'gzip' received = response.headers.get('Content-Encoding') self.assertEqual( expected, received,", "headers=headers) method = 'set object metadata' expected = 202 received", "OPTIONS'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object", "self.assertIn( 'Access-Control-Expose-Headers', response.headers, msg=\"Access-Control-Expose-Headers header was set\") expected = 'X-Foo-Header'", "received = response.headers.get('X-Object-Meta-Foo') self.assertEqual( expected, received, msg='object X-Object-Meta-Foo header value", "{expected}' ' received status code {received}') 
@DataDrivenFixture class ObjectSmokeTest(ObjectStorageFixture): @classmethod", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Allow-Credentials': 'true'}", "be set\") self.assertIn( 'access-control-allow-origin', response.headers, msg=\"access-control-allow-origin header should be set\")", "= '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_obj_name = '{0}_destination'.format(self.default_obj_name) source = '/{0}/{1}'.format(src_container_name,", "should have content type: {0}' ' received: {1}'.format(expected, received)) def", "str(len(updated_object_data)) headers = {'Content-Length': updated_content_length, 'Content-Type': CONTENT_TYPES.get('text')} response = self.client.create_object(", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_start_range( self, object_type, generate_object): container_name = self.create_temp_container(", "self.default_obj_name generate_object(container_name, object_name) response = self.client.delete_object( container_name, object_name) method =", "received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_delete_after( self, object_type, generate_object): container_name = self.create_temp_container(", "src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name =", "ddtest_object_creation_with_x_delete_at( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "def ddtest_partial_object_retrieval_with_start_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "ddtest_object_not_modified_with_if_modified_since( self, object_type, 
generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=5-'}", "versioned container' expected = 204 received = response.status_code self.assertEqual( expected,", "If-match Header: https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "self.client.get_object( dest_container_name, dest_obj_name) method = 'copied object retrieval' expected =", "type: {0}' ' received: {1}'.format(expected, received)) object2_name = 'object2.txt' object2_headers", "= 'http://example.com' received = response.headers.get('access-control-allow-origin') self.assertEqual( expected, received, msg='access-control-allow-origin header", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Headers', response.headers, msg=\"Access-Control-Request-Headers header was", "value' ' expected: {0} received: {1}'.format(expected, received)) headers = {'X-Object-Meta-Foo':", "= str(len(updated_object_data)) headers = {'Content-Length': updated_content_length, 'Content-Type': CONTENT_TYPES.get('text')} response =", "object_name = self.default_obj_name object_headers = { 'Access-Control-Allow-Origin': 'http://example.com'} object_info =", "= self.client.get_object( dest_container_name, dest_obj_name) method = 'copied object retrieval' expected", "disposition header' expected = 201 received = response.status_code self.assertEqual( expected,", "object_info = generate_object(container_name, object_name) headers = {'If-None-Match': 'grok'} response =", "'Content-Disposition': 'attachment; filename=testdata.txt'} object_info = generate_object(container_name, object_name, headers=object_headers) response =", "= \"valid_data\" data_md5 = md5(object_data).hexdigest() 
upper_etag = data_md5.upper() headers =", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response_md5 = md5(response.content).hexdigest()", "dlo/slo support of If-match Header: https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name = self.create_temp_container(", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Access-Control-Allow-Methods':", "= 'GET' received = response.headers.get('Access-Control-Request-Method') self.assertEqual( expected, received, msg='object created", "self.default_obj_name) method = 'object retrieval' expected = 404 received =", "flagged as not modified' expected = 304 received = response.status_code", "Scenario: Create an object using chunked transfer encoding. Expected Results:", "'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\") @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning') def ddtest_versioned_container_creation_with_valid_data(", "self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\") @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning')", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object( dest_container_name, dest_object_name)", "= {'access-control-allow-origin': 'http://example.com', 'access-control-expose-headers': 'X-Trans-Id'} generate_object(container_name, object_name, headers=headers) headers =", "License, Version 2.0 (the \"License\"); you may not use this", "@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_update_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container(", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) 
object_name = self.default_obj_name start_time = calendar.timegm(time.gmtime()) future_time =", "created with Etag header' ' value expected: {0} received: {1}'.format(", "method = 'Object creation via chunked transfer' expected = 201", "'partial object retrieval with start range' expected = 206 received", "expected, received, msg='object created with Access-Control-Request-Method header' ' value expected:", "headers = {'X-Versions-Location': object_history_container_name} self.client.set_container_metadata(container_name, headers=headers) # list objects in", "= '{0}_destination'.format(self.default_obj_name) dest = '/{0}/{1}'.format(dest_container_name, dest_object_name) headers = {'Destination': dest}", "self.default_obj_name, headers=headers) method = 'partial object retrieval with start and", "value expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def", "header was set\") expected = 'http://example.com' received = response.headers.get('Access-Control-Allow-Origin') self.assertEqual(", "received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_match( self, object_type, generate_object): \"\"\" Bug", "expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_method( self, object_type, generate_object): container_name", "( ObjectDatasetList, CONTENT_TYPES) CONTAINER_DESCRIPTOR = 'object_smoke_test' STATUS_CODE_MSG = ('{method} expected", "ver1_info.get('response') method = 'object version one creation' expected = 201", "if unmodified since header' expected = 200 received = response.status_code", "' since header') expected = 412 received = response.status_code self.assertEqual(", "ddtest_object_creation_with_content_disposition( self, object_type, generate_object): container_name = self.create_temp_container( 
descriptor=CONTAINER_DESCRIPTOR) object_name =", "valid object name' expected = 201 received = response.status_code self.assertEqual(", "Access-Control-Expose-Headers header' ' value expected: {0} received: {1}'.format( expected, received))", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "object using chunked transfer encoding. Expected Results: Return a 201", "method = 'copied object retrieval' expected = 200 received =", "retrieval with start range' expected = 206 received = response.status_code", "filed for dlo/slo support of If-match Header: https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name", "headers=object_headers) response = object_info.get('response') method = 'object creation with X-Delete-At", "expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_x_delete_at( self, object_type,", "'GET, POST, OPTIONS'} object_info = generate_object(container_name, object_name, headers=object_headers) response =", "received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_delete_after( self, object_type, generate_object): container_name", "(future date)' expected = 304 received = response.status_code self.assertEqual( expected,", "Content-Encoding header' expected = 201 received = response.status_code self.assertEqual( expected,", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Delete-At',", "metadata X-Object-Meta-Grok: Drok' expected = 202 received = response.status_code self.assertEqual(", "2) object_name = self.default_obj_name ver2_info = generate_object(container_name, object_name) response =", "= response.headers.get('Access-Control-Allow-Credentials') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Credentials header'", "updated_content_length, 
'Content-Type': CONTENT_TYPES.get('text')} response = self.client.create_object( container_name, self.default_obj_name, headers=headers, data=updated_object_data)", "def ddtest_object_retrieval_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "object_name = self.default_obj_name object_headers = {'Access-Control-Max-Age': '5'} object_info = generate_object(container_name,", "response.headers, msg=\"access-control-expose-headers header should be set\") self.assertIn( 'access-control-allow-origin', response.headers, msg=\"access-control-allow-origin", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) updated_object_data = 'Updated test", "= 'put copy object' expected = 201 received = response.status_code", "def ddtest_object_creation_with_etag( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "container_name, object2_name) self.assertEqual( expected, received, msg='object created should have content", "def ddtest_object_deletion_with_valid_object( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "headers = {'Range': 'bytes=5-8'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "created with Access-Control-Request-Method header' ' value expected: {0} received: {1}'.format(", "create_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response =", "expected = 'X-Foo-Header' received = response.headers.get('Access-Control-Expose-Headers') self.assertEqual( expected, received, msg='object", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "Aug 2101 18:44:42 
GMT'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "needs review') @data_driven_test(ObjectDatasetList()) def ddtest_put_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container(", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info = generate_object(container_name, object_name) response", "method=method, expected=expected, received=str(received))) object_response = self.client.get_object(container_name, self.default_obj_name) method = 'Object", "retrieval' expected = 200 received = object_response.status_code self.assertEqual( expected, received,", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "response = self.client.copy_object( src_container_name, src_object_name, headers=headers) method = 'copy object'", "= self.client.get_object( container_name, object2_name) expected = 'text/plain' received = response.headers.get('content-type')", "object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type': False, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name,", "container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Foo', response.headers, msg=\"object updated with X-Object-Meta-Foo header\")", "CONTENT_TYPES.get('text')} response = self.client.create_object( container_name, self.default_obj_name, headers=headers, data=updated_object_data) method =", "msg='object created with Access-Control-Request-Headers header' ' value expected: {0} received:", "{'Origin': 'http://example.com'} response = self.client.get_object_metadata( container_name, object_name, headers=headers) self.assertIn( 'access-control-expose-headers',", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "with if none match header' expected = 200 received =", "headers=object2_headers) response = self.client.get_object( container_name, 
object1_name) expected = 'application/x-www-form-urlencoded' received", "object_name) response = object_info.get('response') method = 'object creation with etag", "unmodified' ' since header') expected = 412 received = response.status_code", "= {'Range': 'bytes=5-8'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", "and end range' expected = 206 received = response.status_code self.assertEqual(", "type: {0}' ' received: {1}'.format(expected, received)) response = self.client.get_object( container_name,", "container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Methods', response.headers, msg=\"Access-Control-Allow-Methods header was set\") expected", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Expose-Headers': 'X-Foo-Header'} object_info", "object_name) headers = {'If-Modified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'}", "object2_name, headers=object2_headers) response = self.client.get_object( container_name, object2_name) expected = 'text/plain'", "with X-Delete-At header' expected = 201 received = response.status_code self.assertEqual(", "= self.default_obj_name obj_info = generate_object(container_name, object_name) headers = {'If-Match': obj_info.get('etag')}", "under the License is distributed on an \"AS IS\" BASIS,", "object metadata' expected = 202 received = response.status_code self.assertEqual( expected,", "@ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_methods( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name start_time = calendar.timegm(time.gmtime()) future_time", "self, object_type, generate_object): \"\"\" Bug filed for dlo/slo support of", 
"generate_object(container_name, object_name, headers={'X-Object-Meta-Grok': 'Drok'}) response = self.client.get_object_metadata( container_name, object_name) self.assertIn(", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Grok',", "response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response =", "future_time received = response.headers.get('X-Delete-At') self.assertEqual( expected, received, msg='object created with", "object_info.get('response') method = 'object creation with Access-Control-Allow-Origin header' expected =", "def ddtest_object_creation_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "@data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "def ddtest_object_creation_with_access_controle_max_age( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "this file except in compliance with the License. 
You may", "reflect origin' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo']))", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) headers = {'If-None-Match':", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_start_range( self, object_type, generate_object): container_name =", "expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_origin( self, object_type, generate_object): container_name", "= 'update version one object' expected = 201 received =", "1) object_name = self.default_obj_name ver1_info = generate_object(container_name, object_name) response =", "container_name, object1_name) expected = 'application/x-www-form-urlencoded' received = response.headers.get('content-type') self.assertEqual( expected,", "method=method, expected=expected, received=str(received))) # Update an object (version 2) object_name", "identical object') @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_update_with_valid_object_name( self, object_type, generate_object): container_name", "= {'Range': 'bytes=5-'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", "dest_obj_name = '{0}_destination'.format(self.default_obj_name) source = '/{0}/{1}'.format(src_container_name, src_object_name) hdrs = {'X-Copy-From':", "{'X-Delete-After': '60'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_none_match( self, object_type, generate_object): \"\"\"", "response = 
object_info.get('response') method = 'object creation with Access-Control-Expose-Headers header'", "{'Access-Control-Expose-Headers': 'X-Foo-Header'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object retrieval", "received, msg='object created should have content type: {0}' ' received:", "@ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_credentials( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=5-'} response", "msg=\"Access-Control-Max-Age header was set\") expected = '5' received = response.headers.get('Access-Control-Max-Age')", "self.default_obj_name def object_data_op(data, extra_data): data = zlib.compress(data) return (data, extra_data)", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name, headers={'X-Object-Meta-Grok': 'Drok'})", "'object retrieval with if modified since header (future date)' expected", "= 'object retrieval' expected = 200 received = response.status_code self.assertEqual(", "file except in compliance with the License. 
You may obtain", "msg='object created with X-Delete-At header value' ' expected: {0} received:", "should be flagged as not modified' expected = 304 received", "headers=headers) method = 'object should be flagged as not modified'", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'X-Object-Meta-Grok': 'Drok'}", "'{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_object_name = '{0}_destination'.format(self.default_obj_name) dest = '/{0}/{1}'.format(dest_container_name, dest_object_name)", "'copy object' expected = 201 received = response.status_code self.assertEqual( expected,", "' value expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo']))", "object_name = self.default_obj_name object_headers = {'Access-Control-Allow-Credentials': 'true'} object_info = generate_object(container_name,", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "GMT'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = ('object", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_not_modified_with_if_modified_since(", "object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info", "= self.default_obj_name generate_object(container_name, object_name) headers = {'X-Object-Meta-Grok': 'Drok'} response =", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_controle_max_age( self, object_type,", "self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object updated with X-Object-Meta-Grok header\") expected =", "= {'Range': 'bytes=99-0'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", "response = object_info.get('response') method = 'object creation with Access-Control-Request-Headers header'", "X-Object-Meta-Foo header value expected: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList())", "@ObjectStorageFixture.required_features('object-cors') def ddtest_object_retrieval_with_origin( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_retrieval_with_origin( self, object_type, generate_object): container_name =", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_fails_with_if_unmodified_since( self, object_type, generate_object): container_name", "updated_content_length = 
str(len(updated_object_data)) headers = {'Content-Length': updated_content_length, 'Content-Type': CONTENT_TYPES.get('text')} response", "ObjectStorageFixture from cloudroast.objectstorage.generators import ( ObjectDatasetList, CONTENT_TYPES) CONTAINER_DESCRIPTOR = 'object_smoke_test'", "self.assertIn( 'Access-Control-Allow-Origin', response.headers, msg=\"Access-Control-Allow-Origin header was set\") expected = 'http://example.com'", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'X-Delete-After': '60'}", "self.default_obj_name) method = 'Object retrieval' expected = 200 received =", "{1}'.format(expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_creation_with_file_compression( self, object_type, generate_object): container_name", "= self.client.get_object_metadata( container_name, object_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object not created", "'object retrieval with if modified since header (past date)' expected", "response.headers.get('Access-Control-Allow-Credentials') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Credentials header' '", "create_response = self.client.create_object(container_name, object_name, data=object_data, headers=headers) method = 'object creation", "response = self.client.get_object( container_name, object2_name) self.assertEqual( expected, received, msg='object created", "descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers = {'X-Detect-Content-Type': True, 'Content-Type': 'application/x-www-form-urlencoded'}", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Expose-Headers': 'X-Foo-Header'} object_info =", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Method', 
response.headers, msg=\"Access-Control-Request-Method", "headers=object_headers) response = object_info.get('response') method = 'object creation with Access-Control-Allow-Methods", "= md5(object_data).hexdigest() upper_etag = data_md5.upper() headers = {\"ETag\": upper_etag} create_response", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_unmodified_since( self,", "object2_headers = {'X-Detect-Content-Type': False, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object2_name, headers=object2_headers) response", "= 'object_smoke_test' STATUS_CODE_MSG = ('{method} expected status code {expected}' '", "def ddtest_partial_object_retrieval_with_end_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "the License. \"\"\" import calendar import time import zlib from", "def ddtest_object_creation_with_access_control_allow_origin( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "object_name) headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'}", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Create an object (version", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'X-Delete-After': '60'} object_info", "headers = {'Range': 'bytes=-4'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {\"Transfer-Encoding\": \"chunked\"} create_response = self.client.create_object( container_name,", "from cafe.drivers.unittest.decorators import ( DataDrivenFixture, data_driven_test) from 
cloudcafe.objectstorage.objectstorage_api.common.constants import \\", "object retrieval with complete range' expected = 200 received =", "generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name,", "object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_history_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Methods', response.headers, msg=\"Access-Control-Allow-Methods header", "= self.client.copy_object( src_container_name, src_object_name, headers=headers) method = 'copy object' expected", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_copy_object(self, object_type,", "object retrieval with start and end range' expected = 206", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Update an object", "'http://example.com'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "self.default_obj_name, headers=headers) method = ('object retrieval precondition fail with if", "= response.headers.get('Access-Control-Expose-Headers') self.assertEqual( expected, received, msg='object created with Access-Control-Expose-Headers header'", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=5-8'}", "cloudroast.objectstorage.generators import ( ObjectDatasetList, CONTENT_TYPES) CONTAINER_DESCRIPTOR = 'object_smoke_test' STATUS_CODE_MSG =", "header was set\") expected = 'gzip' received = 
response.headers.get('Content-Encoding') self.assertEqual(", "= response.headers.get('X-Delete-At') self.assertEqual( expected, received, msg='object created with X-Delete-At header", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object updated", "generate_object(src_container_name, src_object_name) dest_obj_name = '{0}_destination'.format(self.default_obj_name) source = '/{0}/{1}'.format(src_container_name, src_object_name) hdrs", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_data = \"valid_data\"", "2.0 (the \"License\"); you may not use this file except", "= 'object1.txt' object1_headers = {'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) object2_name", "= object_info.get('response') method = 'object creation with Access-Control-Request-Headers header' expected", "('object retrieval precondition fail with if unmodified' ' since header')", "content disposition header' expected = 201 received = response.status_code self.assertEqual(", "ddtest_partial_object_retrieval_with_end_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "= self.default_obj_name start_time = calendar.timegm(time.gmtime()) future_time = str(int(start_time + 60))", "use this file except in compliance with the License. 
You", "content type: {0}' ' received: {1}'.format(expected, received)) object2_name = 'object2.txt'", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header", "with X-Object-Meta-Grok header value' ' expected: {0} received: {1}'.format(expected, received))", "received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_x_delete_at( self, object_type, generate_object): container_name = self.create_temp_container(", "ddtest_object_update_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "'bytes=5-8'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'partial", "method = 'set object metadata' expected = 202 received =", "response = self.client.get_object_metadata( container_name, self.default_obj_name) response = self.client.get_object( container_name, self.default_obj_name)", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Credentials', response.headers, msg=\"Access-Control-Allow-Credentials header", "self.client.get_object_metadata( container_name, object_name, headers=headers) self.assertIn( 'access-control-expose-headers', response.headers, msg=\"access-control-expose-headers header should", "= 'object2.txt' object2_headers = {'X-Detect-Content-Type': True} generate_object(container_name, object2_name, headers=object2_headers) response", "headers = {'X-Object-Meta-Grok': 'Drok'} response = self.client.set_object_metadata( container_name, object_name, headers=headers)", "generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object( container_name, object2_name) expected =", "{'X-Versions-Location': object_history_container_name} 
self.client.set_container_metadata(container_name, headers=headers) # list objects in non-current container", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_not_modified_with_if_modified_since( self, object_type, generate_object): container_name = self.create_temp_container(", "self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=5-'} response = self.client.get_object(", "expected = 204 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "response = self.client.get_object( dest_container_name, dest_obj_name) method = 'copied object retrieval'", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_not_modified_with_if_modified_since( self, object_type, generate_object):", "expected = object_info.get('etag') else: expected = '\"{0}\"'.format(object_info.get('etag')) received = response.headers.get('etag')", "headers = {\"ETag\": upper_etag} create_response = self.client.create_object(container_name, object_name, data=object_data, headers=headers)", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_deletion_with_valid_object( self, object_type, generate_object):", "response = self.client.get_object( container_name, object2_name) expected = 'text/plain' received =", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_info = generate_object(container_name, object_name) headers =", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Headers', response.headers,", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def 
ddtest_partial_object_retrieval_with_complete_range( self, object_type,", "dest_object_name) headers = {'Destination': dest} response = self.client.copy_object( src_container_name, src_object_name,", "self.assertEqual( expected, received, msg='object created with Content-Disposition header value' '", "' received: {1}'.format(expected, received)) response = self.client.get_object( container_name, object2_name) self.assertEqual(", "data=self.generate_chunk_data()) method = 'Object creation via chunked transfer' expected =", "= response.headers.get('Access-Control-Allow-Methods') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Methods header'", "= self.default_obj_name def object_data_op(data, extra_data): data = zlib.compress(data) return (data,", "= 200 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "msg='should return identical object') @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_update_with_valid_object_name( self, object_type,", "start_time = calendar.timegm(time.gmtime()) future_time = str(int(start_time + 60)) object_headers =", "expected = 304 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "(the \"License\"); you may not use this file except in", "hashlib import md5 import unittest from cafe.drivers.unittest.decorators import ( DataDrivenFixture,", "expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_controle_max_age(", "'object creation with content disposition header' expected = 201 received", "response.headers.get('Access-Control-Request-Headers') self.assertEqual( expected, received, msg='object created with Access-Control-Request-Headers header' '", "self.default_obj_name object_headers = {'Access-Control-Max-Age': '5'} 
object_info = generate_object(container_name, object_name, headers=object_headers)", "generate_object(container_name, object_name) headers = {'Range': 'bytes=-4'} response = self.client.get_object( container_name,", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response = self.client.get_object(", "{'Access-Control-Allow-Credentials': 'true'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "@unittest.skip('Problem with this tests assertion, needs review') @data_driven_test(ObjectDatasetList()) def ddtest_put_copy_object(self,", "generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method = 'object creation", "md5(object_data).hexdigest() upper_etag = data_md5.upper() headers = {\"ETag\": upper_etag} create_response =", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Create an object", "updated with X-Object-Meta-Grok header\") expected = 'Drok' received = response.headers.get('X-Object-Meta-Grok')", "method = 'list on versioned container' expected = 200 received", "= self.client.copy_object( dest_container_name, dest_obj_name, headers=hdrs) method = 'put copy object'", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {\"Transfer-Encoding\": \"chunked\"} create_response = self.client.create_object(", "object2_name) self.assertEqual( expected, received, msg='object created should have content type:", "= self.client.get_object(container_name, object_name) method = 'object creation with valid object", "{0}' ' received: {1}'.format(expected, received)) def test_object_creation_via_chunked_transfer(self): \"\"\" Scenario: Create", "@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_match( self, object_type, generate_object): 
\"\"\" Bug filed", "the Apache License, Version 2.0 (the \"License\"); you may not", "or implied. See the License for the specific language governing", "KIND, either express or implied. See the License for the", "calendar.timegm(time.gmtime()) future_time = str(int(start_time + 60)) object_headers = {'X-Delete-At': future_time}", "creation with Access-Control-Allow-Credentials header' expected = 201 received = response.status_code", "creation with etag header' expected = 201 received = response.status_code", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "container_name, self.default_obj_name, headers=headers) method = ('object retrieval precondition fail with", "received)) object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type': True} generate_object(container_name, object2_name,", "method = 'list on empty versioned container' expected = 204", "Access-Control-Request-Headers header' expected = 201 received = response.status_code self.assertEqual( expected,", "= 'object creation with Access-Control-Request-Headers header' expected = 201 received", "ddtest_object_creation_with_etag( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "expected = 202 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "'standard': expected = object_info.get('etag') else: expected = '\"{0}\"'.format(object_info.get('etag')) received =", "msg=\"Access-Control-Request-Headers header was set\") expected = 'x-requested-with' received = response.headers.get('Access-Control-Request-Headers')", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_range(", "received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def 
ddtest_object_creation_with_access_control_allow_origin( self, object_type, generate_object): container_name =", "received=str(received))) # Create an object (version 1) object_name = self.default_obj_name", "self.default_obj_name object_headers = {'Access-Control-Request-Method': 'GET'} object_info = generate_object(container_name, object_name, headers=object_headers)", "= self.default_obj_name ver2_info = generate_object(container_name, object_name) response = ver2_info.get('response') method", "received, msg='object created with Access-Control-Expose-Headers header' ' value expected: {0}", "202 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "chunked transfer encoding. Expected Results: Return a 201 status code", "received: {1}'.format(expected, received)) def test_object_creation_via_chunked_transfer(self): \"\"\" Scenario: Create an object", "object_history_container_name} self.client.set_container_metadata(container_name, headers=headers) # list objects in non-current container response", "'bytes=-4'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'partial", "with Access-Control-Request-Method header' expected = 201 received = response.status_code self.assertEqual(", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_controle_max_age( self,", "response = self.client.get_object( container_name, object1_name) expected = 'text/plain' received =", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Content-Disposition':", "object_headers = {'Access-Control-Expose-Headers': 'X-Foo-Header'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "= self.client.get_object( container_name, 
self.default_obj_name, headers=headers) method = 'partial object retrieval", "received = response.headers.get('Content-Encoding') self.assertEqual( expected, received, msg='object created with Content-Encoding", "the License for the specific language governing permissions and limitations", "= self.default_obj_name object_headers = {'Access-Control-Request-Headers': 'x-requested-with'} object_info = generate_object(container_name, object_name,", "object (version 2) object_name = self.default_obj_name ver2_info = generate_object(container_name, object_name)", "implied. See the License for the specific language governing permissions", "object_name) response = self.client.get_object(container_name, object_name) method = 'object creation with", "header should be set\") expected = 'http://example.com' received = response.headers.get('access-control-allow-origin')", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "object1_headers = {'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) object2_name = 'object2.txt'", "object2_name) expected = 'text/plain' received = response.headers.get('content-type') self.assertEqual( expected, received,", "object_name = self.default_obj_name obj_info = generate_object(container_name, object_name) headers = {'If-Match':", "retrieval with if modified since header (future date)' expected =", "update with valid object name' expected = 201 received =", "with X-Delete-At header value' ' expected: {0} received: {1}'.format(expected, received))", "headers=object_headers) response = object_info.get('response') method = 'object creation with Access-Control-Request-Headers", "' expected: {0} received: {1}'.format(expected, received)) headers = {'X-Object-Meta-Foo': 'Bar'}", "object_name) response = self.client.delete_object( container_name, object_name) method = 'delete object'", "be created. 
\"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {\"Transfer-Encoding\":", "'{0}_destination'.format(self.default_obj_name) source = '/{0}/{1}'.format(src_container_name, src_object_name) hdrs = {'X-Copy-From': source, 'Content-Length':", "generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name", "' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_detected_with_detect_content_type( self, object_type, generate_object):", "code and a single object should be created. \"\"\" container_name", "def ddtest_object_creation_with_content_disposition( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Disposition',", "'object creation with X-Delete-After header' expected = 201 received =", "object_name) headers = {'Range': 'bytes=5-'} response = self.client.get_object( container_name, self.default_obj_name,", "self.assertEqual( expected, received, msg='object created should have content type: {0}'", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At", "header was set\") expected = 'true' received = response.headers.get('Access-Control-Allow-Credentials') self.assertEqual(", "if none match header' expected = 200 received = response.status_code", 
"ddtest_object_creation_with_access_controle_max_age( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "'object creation with valid object name' expected = 201 received", "expected, received, msg='object created with Access-Control-Allow-Credentials header' ' value expected:", "response = object_info.get('response') method = 'object creation with Content-Encoding header'", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_complete_range( self, object_type, generate_object):", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {\"Transfer-Encoding\": \"chunked\"} create_response =", "object_info = generate_object(container_name, object_name) response = object_info.get('response') method = 'object", "header (past date)' expected = 200 received = response.status_code self.assertEqual(", "match header' expected = 200 received = response.status_code self.assertEqual( expected,", "upper_etag = data_md5.upper() headers = {\"ETag\": upper_etag} create_response = self.client.create_object(container_name,", "with valid object name' expected = 200 received = response.status_code", "{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def test_object_creation_with_uppercase_etag(self): container_name = self.create_temp_container(", "self.default_obj_name ver1_info = generate_object(container_name, object_name) response = ver1_info.get('response') method =", "object_info.get('response') method = 'object creation with Access-Control-Expose-Headers header' expected =", "writing, software distributed under the License is distributed on an", "= '5' received = response.headers.get('Access-Control-Max-Age') self.assertEqual( expected, received, msg='object created", "received = 
response.headers.get('Access-Control-Allow-Methods') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Methods", "'X-Object-Meta-Foo', response.headers, msg=\"object updated with X-Object-Meta-Foo header\") expected = 'Bar'", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'X-Delete-After':", "= self.default_obj_name headers = {'access-control-allow-origin': 'http://example.com', 'access-control-expose-headers': 'X-Trans-Id'} generate_object(container_name, object_name,", "headers=object_headers) response = object_info.get('response') method = 'object creation with Content-Encoding", "msg=\"object not created with X-Object-Meta-Grok header\") expected = 'Drok' received", "'Access-Control-Allow-Origin', response.headers, msg=\"Access-Control-Allow-Origin header was set\") expected = 'http://example.com' received", "in compliance with the License. You may obtain a copy", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Disposition', response.headers, msg=\"Content-Disposition header was set\")", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_start_range( self,", "set\") expected = 'true' received = response.headers.get('Access-Control-Allow-Credentials') self.assertEqual( expected, received,", "expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_delete_after( self, object_type,", "i in range(10): yield \"Test chunk %s\\r\\n\" % i @data_driven_test(ObjectDatasetList())", "object_name = self.default_obj_name def object_data_op(data, extra_data): data = zlib.compress(data) return", "should have content type: {0}' ' received: {1}'.format(expected, received)) object2_name", "generate_object): 
container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers =", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Expose-Headers':", "received, msg='object created with Access-Control-Request-Method header' ' value expected: {0}", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Max-Age':", "self.assertIn( 'access-control-expose-headers', response.headers, msg=\"access-control-expose-headers header should be set\") self.assertIn( 'access-control-allow-origin',", "object_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object not created with X-Object-Meta-Grok header\")", "= response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response_md5", "generate_object(container_name, object_name) response = self.client.get_object(container_name, object_name) method = 'object creation", "msg='object created with Access-Control-Allow-Credentials header' ' value expected: {0} received:", "= 'object retrieval' expected = 404 received = response.status_code self.assertEqual(", "generate_object(container_name, object_name, headers=headers) headers = {'Origin': 'http://example.com'} response = self.client.get_object_metadata(", "@ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_origin( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def test_object_creation_with_uppercase_etag(self): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "'Access-Control-Request-Headers', 
response.headers, msg=\"Access-Control-Request-Headers header was set\") expected = 'x-requested-with' received", "= generate_object(container_name, object_name) response = ver2_info.get('response') method = 'update version", "{'X-Object-Meta-Grok': 'Drok'} response = self.client.set_object_metadata( container_name, object_name, headers=headers) method =", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update_with_object_possessing_metadata(", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update_with_object_possessing_metadata( self, object_type, generate_object): container_name =", "code {received}') @DataDrivenFixture class ObjectSmokeTest(ObjectStorageFixture): @classmethod def setUpClass(cls): super(ObjectSmokeTest, cls).setUpClass()", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_etag( self, object_type,", "if unmodified' ' since header') expected = 412 received =", "self.assertEqual( expected, received, msg='object created with Etag header' ' value", "either express or implied. 
See the License for the specific", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_etag( self, object_type, generate_object): container_name", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @unittest.skip('Problem with this tests assertion, needs", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Update an object (version 2)", "\"License\"); you may not use this file except in compliance", "object_info.get('response') method = 'object creation with Access-Control-Max-Age header' expected =", "object_headers = {'Access-Control-Request-Headers': 'x-requested-with'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "not modified' expected = 304 received = response.status_code self.assertEqual( expected,", "method=method, expected=expected, received=str(received))) # Create an object (version 1) object_name", "object_headers = { 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS'} object_info = generate_object(container_name,", "unmodified since header' expected = 200 received = response.status_code self.assertEqual(", "object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=5-8'} response", "{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_retrieval_with_origin( self, object_type, generate_object):", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "obj_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 
'Access-Control-Allow-Origin',", "= Constants.VALID_OBJECT_NAME_WITH_UNICODE @staticmethod def generate_chunk_data(): for i in range(10): yield", "with Access-Control-Allow-Methods header' expected = 201 received = response.status_code self.assertEqual(", "self.default_obj_name) self.assertIn( 'X-Object-Meta-Foo', response.headers, msg=\"object updated with X-Object-Meta-Foo header\") expected", "= object_info.get('response') method = 'object creation with Access-Control-Allow-Credentials header' expected", "'Access-Control-Allow-Methods', response.headers, msg=\"Access-Control-Allow-Methods header was set\") expected = 'GET, POST,", "self.default_obj_name) self.assertIn( 'Content-Disposition', response.headers, msg=\"Content-Disposition header was set\") expected =", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response = self.client.get_object(container_name,", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response = self.client.get_object( container_name, self.default_obj_name)", "generate_object(container_name, object_name) headers = {'If-Match': obj_info.get('etag')} response = self.client.get_object( container_name,", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_none_match( self,", "self.assertIn( 'Access-Control-Allow-Credentials', response.headers, msg=\"Access-Control-Allow-Credentials header was set\") expected = 'true'", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Request-Headers':", "self.default_obj_name, headers=headers) method = 'partial object retrieval with start range'", "start and end range' expected = 206 received = response.status_code", "(version 1) 
object_name = self.default_obj_name ver1_info = generate_object(container_name, object_name) response", "msg='object created with Content-Encoding header value' ' expected: {0} received:", "headers = {'Range': 'bytes=5-'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers)", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_object_name = '{0}_destination'.format(self.default_obj_name)", "msg='object created with Etag header' ' value expected: {0} received:", "= 'object creation with Content-Encoding header' expected = 201 received", "should have content type: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList())", "with Access-Control-Allow-Methods header' ' value expected: {0} received: {1}'.format( expected,", "Access-Control-Allow-Methods header' expected = 201 received = response.status_code self.assertEqual( expected,", "= self.default_obj_name ver1_info = generate_object(container_name, object_name) response = ver1_info.get('response') method", "= calendar.timegm(time.gmtime()) future_time = str(int(start_time + 60)) object_headers = {'X-Delete-At':", "none match header' expected = 200 received = response.status_code self.assertEqual(", "'object retrieval with if none match header' expected = 200", "@ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_expose_headers( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_range( self, object_type,", "= 'object retrieval with if match header' expected = 200", "response.status_code self.assertEqual( expected, received, 
msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def", "dest_container_name, dest_obj_name) method = 'copied object retrieval' expected = 200", "'object creation with valid object name' expected = 200 received", "'object_smoke_test' STATUS_CODE_MSG = ('{method} expected status code {expected}' ' received", "received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_content_disposition( self, object_type, generate_object): container_name = self.create_temp_container(", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name, headers={'X-Object-Meta-Grok':", "zlib.compress(data) return (data, extra_data) object_headers = {'Content-Encoding': 'gzip'} object_info =", "header\") expected = 'Bar' received = response.headers.get('X-Object-Meta-Foo') self.assertEqual( expected, received,", "'Access-Control-Expose-Headers', response.headers, msg=\"Access-Control-Expose-Headers header was set\") expected = 'X-Foo-Header' received", "self.default_obj_name object_info = generate_object(container_name, object_name) response = object_info.get('response') method =", "'gzip' received = response.headers.get('Content-Encoding') self.assertEqual( expected, received, msg='object created with", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) response =", "set\") expected = '5' received = response.headers.get('Access-Control-Max-Age') self.assertEqual( expected, received,", "= ('object retrieval precondition fail with if unmodified' ' since", "received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def test_object_creation_with_uppercase_etag(self): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "content type: {0}' ' received: 
{1}'.format(expected, received)) response = self.client.get_object(", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_modified_since(", "'Access-Control-Allow-Origin': 'http://example.com'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'If-Modified-Since': 'Fri, 17", "= 'object2.txt' object2_headers = {'X-Detect-Content-Type': False, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object2_name,", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_method( self, object_type, generate_object): container_name = self.create_temp_container(", "'object creation with Access-Control-Request-Headers header' expected = 201 received =", "{'If-Match': obj_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method =", "cloudroast.objectstorage.fixtures import ObjectStorageFixture from cloudroast.objectstorage.generators import ( ObjectDatasetList, CONTENT_TYPES) CONTAINER_DESCRIPTOR", "with start and end range' expected = 206 received =", "method = 'object update with valid object name' expected =", "object retrieval' expected = 200 received = response.status_code self.assertEqual( expected,", "modified since header (future date)' expected = 304 received =", "object_data = \"valid_data\" data_md5 = md5(object_data).hexdigest() upper_etag = data_md5.upper() headers", "object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers", "= self.client.get_object( container_name, 
self.default_obj_name, headers=headers) method = ('object retrieval precondition", "headers = {'Origin': 'http://example.com'} response = self.client.get_object_metadata( container_name, object_name, headers=headers)", "msg=\"Content-Disposition header was set\") expected = 'attachment; filename=testdata.txt' received =", "= response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList())", "@data_driven_test(ObjectDatasetList()) def ddtest_object_not_modified_with_if_modified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "msg='object created with Content-Disposition header value' ' expected: {0} received:", "ddtest_content_type_not_detected_without_detect_content_type_header( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name =", "response = object_info.get('response') method = 'object creation with X-Delete-At header'", "object_name = self.default_obj_name object_headers = {'Access-Control-Request-Method': 'GET'} object_info = generate_object(container_name,", "object_name = self.default_obj_name generate_object(container_name, object_name) response = self.client.delete_object( container_name, object_name)", "self.assertEqual( expected, received, msg='access-control-allow-origin header should reflect origin' ' expected:", "received = response.headers.get('Access-Control-Max-Age') self.assertEqual( expected, received, msg='object created with Access-Control-Max-Age", "received)) response = self.client.get_object( container_name, object2_name) self.assertEqual( expected, received, msg='object", "response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object X-Object-Meta-Grok header value expected: {0}'", "creation with Access-Control-Max-Age header' expected = 201 
received = response.status_code", "= {\"Transfer-Encoding\": \"chunked\"} create_response = self.client.create_object( container_name, self.default_obj_name, headers=headers, data=self.generate_chunk_data())", "self.default_obj_name) self.assertIn( 'Access-Control-Request-Headers', response.headers, msg=\"Access-Control-Request-Headers header was set\") expected =", "with if modified since header (future date)' expected = 304", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_modified_since( self, object_type, generate_object):", "created with Access-Control-Expose-Headers header' ' value expected: {0} received: {1}'.format(", "= self.client.get_object_metadata( container_name, self.default_obj_name) response = self.client.get_object( container_name, self.default_obj_name) self.assertIn(", "with Content-Disposition header value' ' expected: {0} received: {1}'.format(expected, received))", "expected = 404 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "object1_name = 'object1.txt' object1_headers = {'X-Detect-Content-Type': True, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name,", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name start_time = calendar.timegm(time.gmtime())", "{0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update(self, object_type, generate_object):", "generate_object(container_name, object_name) headers = {'If-None-Match': 'grok'} response = self.client.get_object( container_name,", "expected, received, msg='object created should have content type: {0}' '", "with valid object name' expected = 201 received = response.status_code", "self.assertEqual( expected, received, msg='object created with 
Access-Control-Allow-Credentials header' ' value", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Expose-Headers', response.headers, msg=\"Access-Control-Expose-Headers header was", "msg='object X-Object-Meta-Grok header value expected: {0}' ' received: {1}'.format(expected, received))", "'Drok'}) response = self.client.get_object_metadata( container_name, object_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object", "set\") expected = data_md5 received = object_response.headers.get('etag') self.assertEqual( expected, received,", "received, msg='object created with Content-Disposition header value' ' expected: {0}", "method = 'object creation with uppercase etag header' expected =", "'delete object' expected = 204 received = response.status_code self.assertEqual( expected,", "self.default_obj_name, headers=headers) method = 'set object metadata' expected = 202", "headers = {'If-Modified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'} response", "= {'Access-Control-Expose-Headers': 'X-Foo-Header'} object_info = generate_object(container_name, object_name, headers=object_headers) response =", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_complete_range( self, object_type, generate_object): container_name", "headers=object1_headers) object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type': False, 'Content-Type': 'application/x-www-form-urlencoded'}", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) response = self.client.delete_object( container_name,", "headers={'X-Object-Meta-Grok': 'Drok'}) response = self.client.get_object_metadata( container_name, object_name) self.assertIn( 'X-Object-Meta-Grok', response.headers,", "limitations under the License. 
\"\"\" import calendar import time import", "source, 'Content-Length': '0'} response = self.client.copy_object( dest_container_name, dest_obj_name, headers=hdrs) method", "self.default_obj_name ver2_info = generate_object(container_name, object_name) response = ver2_info.get('response') method =", "'X-Foo-Header' received = response.headers.get('Access-Control-Expose-Headers') self.assertEqual( expected, received, msg='object created with", "{'If-Modified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'} response = self.client.get_object(", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container(", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_retrieval_with_origin( self, object_type,", "= self.default_obj_name object_headers = {'Access-Control-Expose-Headers': 'X-Foo-Header'} object_info = generate_object(container_name, object_name,", "cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE @staticmethod def generate_chunk_data(): for i in range(10):", "filename=testdata.txt'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "filename=testdata.txt' received = response.headers.get('Content-Disposition') self.assertEqual( expected, received, msg='object created with", "@data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_delete_after( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'partial object retrieval with", "method = 'object retrieval with if none match header' expected", "self.default_obj_name, headers=headers, data=updated_object_data) method = 'object update 
with valid object", "created with Access-Control-Allow-Credentials header' ' value expected: {0} received: {1}'.format(", "'GET'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Origin', response.headers, msg=\"Access-Control-Allow-Origin header was set\")", "method = 'partial object retrieval with complete range' expected =", "ddtest_object_creation_with_access_control_allow_methods( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "set\") expected = 'http://example.com' received = response.headers.get('access-control-allow-origin') self.assertEqual( expected, received,", "on versioned container' expected = 200 received = response.status_code self.assertEqual(", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) response = self.client.get_object(container_name, object_name)", "object_name = self.default_obj_name object_data = \"valid_data\" data_md5 = md5(object_data).hexdigest() upper_etag", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_controle_max_age( self, object_type, generate_object): container_name = self.create_temp_container(", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.list_objects(object_history_container_name) method = 'list", "transfer encoding. 
Expected Results: Return a 201 status code and", "{'If-None-Match': 'grok'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method =", "created with X-Delete-At header value' ' expected: {0} received: {1}'.format(expected,", "= self.default_obj_name object_headers = {'X-Delete-After': '60'} object_info = generate_object(container_name, object_name,", "object_name) headers = {'Range': 'bytes=5-8'} response = self.client.get_object( container_name, self.default_obj_name,", "msg='object created with X-Object-Meta-Grok header value' ' expected: {0} received:", "support of If-match Header: https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "was set\") if object_type == 'standard': expected = object_info.get('etag') else:", "created with Access-Control-Max-Age header' ' value expected: {0} received: {1}'.format(", "object_name = self.default_obj_name object_headers = {'X-Delete-After': '60'} object_info = generate_object(container_name,", "specific language governing permissions and limitations under the License. 
\"\"\"", "method = 'object creation with Access-Control-Allow-Methods header' expected = 201", "expected=expected, received=str(received))) # Update an object (version 2) object_name =", "response = self.client.get_object( container_name, object1_name) expected = 'application/x-www-form-urlencoded' received =", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Access-Control-Allow-Origin': 'http://example.com'}", "setUpClass(cls): super(ObjectSmokeTest, cls).setUpClass() cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE @staticmethod def generate_chunk_data(): for", "= self.default_obj_name generate_object(container_name, object_name) headers = {'If-Unmodified-Since': 'Fri, 17 Aug", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=99-0'}", "expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_credentials( self, object_type, generate_object): container_name", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name, headers={'X-Object-Meta-Grok': 'Drok'}) response =", "with Access-Control-Expose-Headers header' ' value expected: {0} received: {1}'.format( expected,", "received, msg='access-control-allow-origin header should reflect origin' ' expected: {0} received:", "= 'attachment; filename=testdata.txt' received = response.headers.get('Content-Disposition') self.assertEqual( expected, received, msg='object", "received = response.headers.get('Access-Control-Request-Method') self.assertEqual( expected, received, msg='object created with Access-Control-Request-Method", "applicable law or agreed to in writing, software distributed under", "ddtest_versioned_container_creation_with_valid_data( self, object_type, generate_object): 
container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_history_container_name =", "tests assertion, needs review') @data_driven_test(ObjectDatasetList()) def ddtest_put_copy_object(self, object_type, generate_object): src_container_name", "= self.client.set_object_metadata( container_name, self.default_obj_name, headers=headers) method = 'set object metadata'", "\\ Constants from cloudroast.objectstorage.fixtures import ObjectStorageFixture from cloudroast.objectstorage.generators import (", "headers=headers) method = 'partial object retrieval with start and end", "retrieval with if none match header' expected = 200 received", "= 'list on empty versioned container' expected = 204 received", "= response.headers.get('Content-Disposition') self.assertEqual( expected, received, msg='object created with Content-Disposition header", "object1_name) expected = 'text/plain' received = response.headers.get('content-type') self.assertEqual( expected, received,", "= 'partial object retrieval with start and end range' expected", "retrieval with end range' expected = 206 received = response.status_code", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_not_modified_with_if_modified_since( self,", "container_name, object1_name) expected = 'text/plain' received = response.headers.get('content-type') self.assertEqual( expected,", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_etag(", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_obj_name =", "with content disposition header' expected = 201 received = response.status_code", 
"msg=\"Access-Control-Allow-Methods header was set\") expected = 'GET, POST, OPTIONS' received", "{ 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS'} object_info = generate_object(container_name, object_name, headers=object_headers)", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn(", "Content-Encoding header value' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList())", "'Access-Control-Allow-Methods': 'GET, POST, OPTIONS'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "= 'Object creation via chunked transfer' expected = 201 received", "(data, extra_data) object_headers = {'Content-Encoding': 'gzip'} object_info = generate_object(container_name, object_name,", "'object creation with Content-Encoding header' expected = 201 received =", "= {'X-Delete-After': '60'} object_info = generate_object(container_name, object_name, headers=object_headers) response =", "content type: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_detected_with_detect_content_type(", "object_info.get('response') method = 'object creation with content disposition header' expected", "self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=99-0'} response = self.client.get_object(", "transfer' expected = 201 received = create_response.status_code self.assertEqual( expected, received,", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Allow-Credentials': 'true'} object_info", "OF ANY KIND, either express or implied. 
See the License", "= self.client.get_object( container_name, object2_name) self.assertEqual( expected, received, msg='object created should", "ddtest_object_creation_with_access_control_allow_origin( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "test file data' updated_content_length = str(len(updated_object_data)) headers = {'Content-Length': updated_content_length,", "header was set\") expected = 'GET, POST, OPTIONS' received =", "response.headers, msg=\"Content-Encoding header was set\") expected = 'gzip' received =", "container' expected = 200 received = response.status_code self.assertEqual( expected, received,", "'attachment; filename=testdata.txt' received = response.headers.get('Content-Disposition') self.assertEqual( expected, received, msg='object created", "response.headers.get('content-type') self.assertEqual( expected, received, msg='object created should have content type:", "from cloudroast.objectstorage.fixtures import ObjectStorageFixture from cloudroast.objectstorage.generators import ( ObjectDatasetList, CONTENT_TYPES)", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Foo', response.headers, msg=\"object updated", "value expected: {0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def", "ver2_info = generate_object(container_name, object_name) response = ver2_info.get('response') method = 'update", "= object_info.get('response') method = 'object creation with Access-Control-Allow-Origin header' expected", "header value expected: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update_with_object_possessing_metadata( self, object_type, generate_object): container_name = 
self.create_temp_container(", "self.default_obj_name generate_object(container_name, object_name) response = self.client.get_object(container_name, object_name) method = 'object", "received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_credentials( self, object_type, generate_object): container_name =", "def ddtest_object_retrieval_fails_with_if_unmodified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_not_detected_without_detect_content_type_header( self, object_type, generate_object): container_name", "' received: {1}'.format(expected, received)) object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type':", "= 'object creation with valid object name' expected = 200", "expected=expected, received=str(received))) # Create an object (version 1) object_name =", "'http://example.com', 'access-control-expose-headers': 'X-Trans-Id'} generate_object(container_name, object_name, headers=headers) headers = {'Origin': 'http://example.com'}", "expected = 412 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "object_response.headers.get('etag') self.assertEqual( expected, received, msg='object created with Etag header' '", "method = 'put copy object' expected = 201 received =", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_match( self, object_type, generate_object): \"\"\"", "X-Delete-At header value' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList())", "container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Credentials', response.headers, 
msg=\"Access-Control-Allow-Credentials header was set\") expected", "'access-control-allow-origin', response.headers, msg=\"access-control-allow-origin header should be set\") expected = 'http://example.com'", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_complete_range(", "ddtest_obj_metadata_update_with_object_possessing_metadata( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "data=updated_object_data) method = 'object update with valid object name' expected", "'Access-Control-Request-Method', response.headers, msg=\"Access-Control-Request-Method header was set\") expected = 'GET' received", "'Access-Control-Allow-Credentials', response.headers, msg=\"Access-Control-Allow-Credentials header was set\") expected = 'true' received", "method=method, expected=expected, received=str(received))) response_md5 = md5(response.content).hexdigest() self.assertEqual( object_info.get('md5'), response_md5, msg='should", "headers=object_headers) response = object_info.get('response') method = 'object creation with Access-Control-Allow-Credentials", "object_name) response = object_info.get('response') method = 'object creation with valid", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object( dest_container_name, dest_object_name) method", "('{method} expected status code {expected}' ' received status code {received}')", "expected=expected, received=str(received))) @unittest.skip('Problem with this tests assertion, needs review') @data_driven_test(ObjectDatasetList())", "@data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_etag( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", 
"object_name, headers=headers) headers = {'Origin': 'http://example.com'} response = self.client.get_object_metadata( container_name,", "object_history_container_name) method = 'list on empty versioned container' expected =", "'/{0}/{1}'.format(dest_container_name, dest_object_name) headers = {'Destination': dest} response = self.client.copy_object( src_container_name,", "received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_x_delete_at( self, object_type, generate_object): container_name", "= object_info.get('response') method = 'object creation with Content-Encoding header' expected", "source = '/{0}/{1}'.format(src_container_name, src_object_name) hdrs = {'X-Copy-From': source, 'Content-Length': '0'}", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name obj_info = generate_object(container_name, object_name) headers =", "200 received = object_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "object_info.get('response') method = 'object creation with etag header' expected =", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Credentials',", "Apache License, Version 2.0 (the \"License\"); you may not use", "= 'object creation with valid object name' expected = 201", "response.headers.get('Access-Control-Allow-Origin') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Origin header' '", "= 'object update with valid object name' expected = 201", "'true'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "'{0}_destination'.format(self.default_obj_name) dest = '/{0}/{1}'.format(dest_container_name, dest_object_name) headers = {'Destination': dest} response", "creation with 
Access-Control-Allow-Origin header' expected = 201 received = response.status_code", "headers = {\"Transfer-Encoding\": \"chunked\"} create_response = self.client.create_object( container_name, self.default_obj_name, headers=headers,", "generate_object(container_name, object_name) headers = {'If-Modified-Since': 'Fri, 17 Aug 2001 18:44:42", "expected = 200 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format(", "header should be set\") self.assertIn( 'access-control-allow-origin', response.headers, msg=\"access-control-allow-origin header should", "creation with X-Delete-After header' expected = 201 received = response.status_code", "object_name = self.default_obj_name generate_object(container_name, object_name, headers={'X-Object-Meta-Grok': 'Drok'}) response = self.client.get_object_metadata(", "response = object_info.get('response') method = 'object creation with valid object", "self.client.create_object( container_name, self.default_obj_name, headers=headers, data=self.generate_chunk_data()) method = 'Object creation via", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Foo', response.headers,", "time import zlib from hashlib import md5 import unittest from", "= response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object X-Object-Meta-Grok header value expected:", "= 'object creation with Access-Control-Max-Age header' expected = 201 received", "object_name) response = ver2_info.get('response') method = 'update version one object'", "self.assertIn( 'etag', response.headers, msg=\"Etag header was set\") if object_type ==", "received, msg='object created with Content-Encoding header value' ' expected: {0}", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object(", 
"descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range': 'bytes=-4'}", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Method', response.headers, msg=\"Access-Control-Request-Method header", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) updated_object_data =", "= {'Content-Length': updated_content_length, 'Content-Type': CONTENT_TYPES.get('text')} response = self.client.create_object( container_name, self.default_obj_name,", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Credentials', response.headers, msg=\"Access-Control-Allow-Credentials header was", "304 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "self.default_obj_name object_info = generate_object(container_name, object_name) headers = {'If-None-Match': 'grok'} response", "ddtest_object_creation_with_access_control_expose_headers( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "def object_data_op(data, extra_data): data = zlib.compress(data) return (data, extra_data) object_headers", "'object2.txt' object2_headers = {'X-Detect-Content-Type': True} generate_object(container_name, object2_name, headers=object2_headers) response =", "object_name) headers = {'Range': 'bytes=-4'} response = self.client.get_object( container_name, self.default_obj_name,", "'object creation with uppercase etag header' expected = 201 received", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @unittest.skip('Problem with this tests assertion,", "assertion, needs review') 
@data_driven_test(ObjectDatasetList()) def ddtest_put_copy_object(self, object_type, generate_object): src_container_name =", "{0}' ' received: {1}'.format(expected, received)) response = self.client.get_object( container_name, object2_name)", "= {'X-Copy-From': source, 'Content-Length': '0'} response = self.client.copy_object( dest_container_name, dest_obj_name,", "'bytes=5-'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'partial", "= self.default_obj_name object_headers = {'Access-Control-Allow-Credentials': 'true'} object_info = generate_object(container_name, object_name,", "received = response.headers.get('Access-Control-Expose-Headers') self.assertEqual( expected, received, msg='object created with Access-Control-Expose-Headers", "'object version one creation' expected = 201 received = response.status_code", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_valid_object_name( self, object_type,", "'set object metadata' expected = 202 received = response.status_code self.assertEqual(", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update_with_object_possessing_metadata( self, object_type, generate_object): container_name", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_fails_with_if_unmodified_since( self, object_type, generate_object): container_name = self.create_temp_container(", "retrieval' expected = 200 received = response.status_code self.assertEqual( expected, received,", "self.default_obj_name object_data = \"valid_data\" data_md5 = md5(object_data).hexdigest() upper_etag = data_md5.upper()", "received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_content_disposition( self, object_type, 
generate_object): container_name", "= generate_object(container_name, object_name) headers = {'If-Match': obj_info.get('etag')} response = self.client.get_object(", "= self.client.set_object_metadata( container_name, object_name, headers=headers) method = 'set object metadata", "received = response.headers.get('Content-Disposition') self.assertEqual( expected, received, msg='object created with Content-Disposition", "2101 18:44:42 GMT'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", "msg=\"Etag header was set\") if object_type == 'standard': expected =", "= object_info.get('response') method = 'object creation with Access-Control-Request-Method header' expected", "header value' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def", "method=method, expected=expected, received=str(received))) @unittest.skip('Problem with this tests assertion, needs review')", "cloudcafe.objectstorage.objectstorage_api.common.constants import \\ Constants from cloudroast.objectstorage.fixtures import ObjectStorageFixture from cloudroast.objectstorage.generators", "= 'copy object' expected = 201 received = response.status_code self.assertEqual(", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_valid_object_name( self, object_type, generate_object):", "self.assertEqual( expected, received, msg='object created with Access-Control-Request-Method header' ' value", "header was set\") expected = 'GET' received = response.headers.get('Access-Control-Request-Method') self.assertEqual(", "ddtest_object_retrieval_with_origin( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "dest_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = 
'{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_obj_name", "response.headers.get('X-Object-Meta-Foo') self.assertEqual( expected, received, msg='object X-Object-Meta-Foo header value expected: {0}'", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Allow-Credentials': 'true'} object_info =", "= 'object creation with Access-Control-Allow-Methods header' expected = 201 received", "response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response_md5 =", "in non-current container response = self.client.list_objects( object_history_container_name) method = 'list", "headers=object_headers) response = object_info.get('response') method = 'object creation with Access-Control-Max-Age", "self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Origin header' ' value", "object_name) method = 'object creation with valid object name' expected", "self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object should be flagged", "expected=expected, received=str(received))) object_response = self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', object_response.headers,", "= {'Destination': dest} response = self.client.copy_object( src_container_name, src_object_name, headers=headers) method", "self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', response.headers, msg=\"Etag header was set\")", "= 'Updated test file data' updated_content_length = str(len(updated_object_data)) headers =", "have content type: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) 
def ddtest_object_creation_with_valid_object_name(", "ddtest_object_creation_with_access_control_request_headers( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "= self.client.list_objects( object_history_container_name) method = 'list on empty versioned container'", "container_name, self.default_obj_name, headers=headers) method = 'partial object retrieval with end", "expected, received, msg='object X-Object-Meta-Foo header value expected: {0}' ' received:", "License. \"\"\" import calendar import time import zlib from hashlib", "# Create an object (version 1) object_name = self.default_obj_name ver1_info", "'http://example.com'} response = self.client.get_object_metadata( container_name, object_name, headers=headers) self.assertIn( 'access-control-expose-headers', response.headers,", "self.default_obj_name generate_object(container_name, object_name) updated_object_data = 'Updated test file data' updated_content_length", "'object creation with X-Delete-At header' expected = 201 received =", "{'Range': 'bytes=99-0'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method =", "data_md5 received = object_response.headers.get('etag') self.assertEqual( expected, received, msg='object created with", "= 'application/x-www-form-urlencoded' received = response.headers.get('content-type') self.assertEqual( expected, received, msg='object created", "self.client.list_objects( object_history_container_name) method = 'list on empty versioned container' expected", "self.assertEqual( object_info.get('md5'), response_md5, msg='should return identical object') @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def", "review') @data_driven_test(ObjectDatasetList()) def ddtest_put_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "data_md5.upper() headers = {\"ETag\": 
upper_etag} create_response = self.client.create_object(container_name, object_name, data=object_data,", "object' expected = 201 received = response.status_code self.assertEqual( expected, received,", "from hashlib import md5 import unittest from cafe.drivers.unittest.decorators import (", "response = object_info.get('response') method = 'object creation with X-Delete-After header'", "= 'object1.txt' object1_headers = {'X-Detect-Content-Type': True, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name,", "self.default_obj_name) response = self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', response.headers, msg=\"Etag", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Foo', response.headers, msg=\"object", "of If-match Header: https://bugs.launchpad.net/swift/+bug/1279076 \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.list_objects(object_history_container_name) method", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_copy_object(self,", "software distributed under the License is distributed on an \"AS", "'etag', object_response.headers, msg=\"Etag header was set\") expected = data_md5 received", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Create an object (version 1)", "' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update(self, object_type, generate_object): container_name", "self.client.get_object( container_name, object1_name) expected = 'application/x-www-form-urlencoded' received = 
response.headers.get('content-type') self.assertEqual(", "with Access-Control-Request-Method header' ' value expected: {0} received: {1}'.format( expected,", "ddtest_object_creation_with_delete_after( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "object_info.get('response') method = 'object creation with Access-Control-Allow-Credentials header' expected =", "object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'X-Object-Meta-Grok': 'Drok'} response", "DataDrivenFixture, data_driven_test) from cloudcafe.objectstorage.objectstorage_api.common.constants import \\ Constants from cloudroast.objectstorage.fixtures import", "expected, received, msg='object created with Content-Encoding header value' ' expected:", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_origin( self, object_type, generate_object): container_name = self.create_temp_container(", "type: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_detected_with_detect_content_type( self,", "= 'partial object retrieval with end range' expected = 206", "= object_info.get('response') method = 'object creation with Access-Control-Max-Age header' expected", "response.headers.get('Content-Disposition') self.assertEqual( expected, received, msg='object created with Content-Disposition header value'", "'slo'])) def ddtest_object_creation_with_file_compression( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "expected = 'attachment; filename=testdata.txt' received = response.headers.get('Content-Disposition') self.assertEqual( expected, received,", "content type: {0}' ' received: {1}'.format(expected, received)) def test_object_creation_via_chunked_transfer(self): 
\"\"\"", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object( dest_container_name, dest_obj_name)", "{1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_detected_with_detect_content_type( self, object_type, generate_object): container_name =", "'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) response = self.client.get_object( container_name, object1_name) expected", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response = self.client.get_object(container_name, self.default_obj_name) method", "import time import zlib from hashlib import md5 import unittest", "creation' expected = 201 received = response.status_code self.assertEqual( expected, received,", "method = 'object retrieval with if modified since header (future", "msg=\"Content-Encoding header was set\") expected = 'gzip' received = response.headers.get('Content-Encoding')", "= '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_object_name = '{0}_destination'.format(self.default_obj_name) dest = '/{0}/{1}'.format(dest_container_name,", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response_md5 = md5(response.content).hexdigest() self.assertEqual(", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\")", "headers=object_headers) response = object_info.get('response') method = 'object creation with X-Delete-After", "expected, received, msg='object created with Content-Disposition header value' ' expected:", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_retrieval_with_origin( self, object_type, 
generate_object): container_name = self.create_temp_container(", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'If-Unmodified-Since': 'Fri,", "created with X-Object-Meta-Grok header value' ' expected: {0} received: {1}'.format(expected,", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {'X-Versions-Location': object_history_container_name} self.client.set_container_metadata(container_name, headers=headers) #", "= generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method = 'object", "= {'X-Delete-At': future_time} object_info = generate_object(container_name, object_name, headers=object_headers) response =", "= response.headers.get('Access-Control-Max-Age') self.assertEqual( expected, received, msg='object created with Access-Control-Max-Age header'", "chunk %s\\r\\n\" % i @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_valid_object_name( self, object_type, generate_object):", "= self.client.create_object( container_name, self.default_obj_name, headers=headers, data=self.generate_chunk_data()) method = 'Object creation", "= {'X-Object-Meta-Grok': 'Drok'} response = self.client.set_object_metadata( container_name, object_name, headers=headers) method", "header' expected = 200 received = response.status_code self.assertEqual( expected, received,", "headers=headers) method = 'partial object retrieval with end range' expected", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object updated with X-Object-Meta-Grok", "object_name) headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'}", "etag header' expected = 201 received = response.status_code self.assertEqual( expected,", "self.client.get_object( dest_container_name, dest_object_name) method = 'copied object retrieval' expected =", 
"received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_modified_since( self, object_type, generate_object): container_name = self.create_temp_container(", "{'X-Delete-At': future_time} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "headers=object_headers) response = object_info.get('response') method = 'object creation with Access-Control-Allow-Origin", "= {'If-None-Match': object_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method", "response.headers.get('Access-Control-Allow-Methods') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Methods header' '", "object_headers = {'Access-Control-Allow-Credentials': 'true'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "object_name = self.default_obj_name object_headers = {'Access-Control-Request-Headers': 'x-requested-with'} object_info = generate_object(container_name,", "should be set\") expected = 'http://example.com' received = response.headers.get('access-control-allow-origin') self.assertEqual(", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_origin( self, object_type,", "self.assertEqual( expected, received, msg='object created with X-Object-Meta-Grok header value' '", "expected = 'http://example.com' received = response.headers.get('Access-Control-Allow-Origin') self.assertEqual( expected, received, msg='object", "= {'X-Detect-Content-Type': False, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object2_name, headers=object2_headers) response =", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name 
generate_object(container_name, object_name) headers =", "container' expected = 204 received = response.status_code self.assertEqual( expected, received,", "date)' expected = 200 received = response.status_code self.assertEqual( expected, received,", "= {\"ETag\": upper_etag} create_response = self.client.create_object(container_name, object_name, data=object_data, headers=headers) method", "received = response.headers.get('Access-Control-Allow-Origin') self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Origin", "= self.client.get_object( container_name, object1_name) expected = 'application/x-www-form-urlencoded' received = response.headers.get('content-type')", "object1_name, headers=object1_headers) object2_name = 'object2.txt' object2_headers = {'X-Detect-Content-Type': False, 'Content-Type':", "object1_headers = {'X-Detect-Content-Type': True, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object1_name, headers=object1_headers) response", "' received: {1}'.format(expected, received)) def test_object_creation_via_chunked_transfer(self): \"\"\" Scenario: Create an", "generate_object(container_name, object_name) response = self.client.delete_object( container_name, object_name) method = 'delete", "value' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_content_disposition(", "received = object_response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received)))", "'Access-Control-Max-Age', response.headers, msg=\"Access-Control-Max-Age header was set\") expected = '5' received", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Access-Control-Allow-Methods': 'GET, POST,", "generate_object(container_name, object_name) headers = {'X-Object-Meta-Grok': 'Drok'} response = 
self.client.set_object_metadata( container_name,", "with if match header' expected = 200 received = response.status_code", "self.client.get_object_metadata( container_name, object_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object not created with", "@data_driven_test(ObjectDatasetList()) def ddtest_content_type_detected_with_detect_content_type( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "def ddtest_object_creation_with_access_control_allow_methods( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "self.default_obj_name, headers=headers, data=self.generate_chunk_data()) method = 'Object creation via chunked transfer'", "= { 'Content-Disposition': 'attachment; filename=testdata.txt'} object_info = generate_object(container_name, object_name, headers=object_headers)", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) object_response = self.client.get_object( container_name,", "{1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_content_disposition( self, object_type, generate_object): container_name =", "to in writing, software distributed under the License is distributed", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object( container_name, self.default_obj_name)", "self.default_obj_name headers = {'access-control-allow-origin': 'http://example.com', 'access-control-expose-headers': 'X-Trans-Id'} generate_object(container_name, object_name, headers=headers)", "404 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "generate_object): \"\"\" Bug filed for dlo/slo support of If-match Header:", "have content type: {0}' ' 
received: {1}'.format(expected, received)) object2_name =", "self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Request-Headers', response.headers, msg=\"Access-Control-Request-Headers header was set\")", "'X-Foo-Header'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response') method", "= object_info.get('response') method = 'object creation with Access-Control-Allow-Methods header' expected", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Update an object (version", "\"\"\" import calendar import time import zlib from hashlib import", "= 'list on versioned container' expected = 200 received =", "self.client.list_objects(object_history_container_name) method = 'list on versioned container' expected = 200", "self.default_obj_name) self.assertIn( 'Access-Control-Max-Age', response.headers, msg=\"Access-Control-Max-Age header was set\") expected =", "data_driven_test) from cloudcafe.objectstorage.objectstorage_api.common.constants import \\ Constants from cloudroast.objectstorage.fixtures import ObjectStorageFixture", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @unittest.skip('Problem with this tests", "= {'If-Unmodified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'} response =", "headers=headers) method = 'object retrieval with if unmodified since header'", "status code and a single object should be created. 
\"\"\"", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) headers = {'If-None-Match': object_info.get('etag')} response =", "@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_creation_with_file_compression( self, object_type, generate_object): container_name = self.create_temp_container(", "object_name = self.default_obj_name object_headers = { 'Content-Disposition': 'attachment; filename=testdata.txt'} object_info", "% i @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_valid_object_name( self, object_type, generate_object): container_name =", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_match( self, object_type,", "object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'If-Unmodified-Since': 'Fri, 17", "set\") expected = future_time received = response.headers.get('X-Delete-At') self.assertEqual( expected, received,", "def ddtest_object_creation_with_access_control_request_headers( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_history_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {'X-Versions-Location': object_history_container_name}", "= 'set object metadata' expected = 202 received = response.status_code", "for the specific language governing permissions and limitations under the", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Methods',", "expected: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update(self, 
object_type,", "updated with X-Object-Meta-Foo header\") expected = 'Bar' received = response.headers.get('X-Object-Meta-Foo')", "received, msg='object created with Access-Control-Allow-Origin header' ' value expected: {0}", "X-Object-Meta-Grok header value' ' expected: {0} received: {1}'.format(expected, received)) headers", "response.headers, msg=\"Etag header was set\") if object_type == 'standard': expected", "object name' expected = 200 received = response.status_code self.assertEqual( expected,", "self.assertEqual( expected, received, msg='object created with Content-Encoding header value' '", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) headers = {'If-None-Match': object_info.get('etag')}", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Foo', response.headers, msg=\"object updated with", "object_name, headers={'X-Object-Meta-Grok': 'Drok'}) response = self.client.get_object_metadata( container_name, object_name) self.assertIn( 'X-Object-Meta-Grok',", "self.client.get_object_metadata( container_name, self.default_obj_name) response = self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag',", "was set\") expected = 'true' received = response.headers.get('Access-Control-Allow-Credentials') self.assertEqual( expected,", "method=method, expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Max-Age',", "= self.client.delete_object( container_name, object_name) method = 'delete object' expected =", "response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Create", "upper_etag} create_response = self.client.create_object(container_name, object_name, data=object_data, headers=headers) method = 'object", "expected = 'GET' 
received = response.headers.get('Access-Control-Request-Method') self.assertEqual( expected, received, msg='object", "= self.client.list_objects(object_history_container_name) method = 'list on versioned container' expected =", "self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object not created with X-Object-Meta-Grok header\") expected", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers = {'Content-Type': 'application/x-www-form-urlencoded'}", "generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name headers =", "i @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container(", "\"\"\" Bug filed for dlo/slo support of If-match Header: https://bugs.launchpad.net/swift/+bug/1279076", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_start_range( self, object_type, generate_object): container_name", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_end_range( self, object_type, generate_object):", "= self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object retrieval with", "= { 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS'} object_info = generate_object(container_name, object_name,", "self.assertEqual( expected, received, msg='object created with Access-Control-Allow-Methods header' ' value", "Update an object (version 2) object_name = self.default_obj_name ver2_info =", "'object creation with Access-Control-Allow-Credentials header' expected = 201 received =", "Content-Disposition header value' ' expected: {0} received: {1}'.format(expected, received)) 
@data_driven_test(ObjectDatasetList())", "object should be created. \"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers", "X-Object-Meta-Grok header\") expected = 'Drok' received = response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected,", "= response.headers.get('X-Object-Meta-Foo') self.assertEqual( expected, received, msg='object X-Object-Meta-Foo header value expected:", "object name' expected = 201 received = response.status_code self.assertEqual( expected,", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_copy_object(self, object_type, generate_object): src_container_name", "self.client.create_object(container_name, object_name, data=object_data, headers=headers) method = 'object creation with uppercase", "ddtest_object_deletion_with_valid_object( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "'Bar' received = response.headers.get('X-Object-Meta-Foo') self.assertEqual( expected, received, msg='object X-Object-Meta-Foo header", "'list on empty versioned container' expected = 204 received =", "self.default_obj_name, headers=headers) method = 'partial object retrieval with end range'", "'X-Trans-Id'} generate_object(container_name, object_name, headers=headers) headers = {'Origin': 'http://example.com'} response =", "def ddtest_obj_metadata_update_with_object_possessing_metadata( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "Expected Results: Return a 201 status code and a single", "version one creation' expected = 201 received = response.status_code self.assertEqual(", "17 Aug 2101 18:44:42 GMT'} response = self.client.get_object( container_name, self.default_obj_name,", "fail with if unmodified' ' since header') expected = 412", "generate_object): 
container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_history_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers", "expected, received, msg='object created with Access-Control-Expose-Headers header' ' value expected:", "expected = 'GET, POST, OPTIONS' received = response.headers.get('Access-Control-Allow-Methods') self.assertEqual( expected,", "'update version one object' expected = 201 received = response.status_code", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_match( self,", "received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_not_detected_without_detect_content_type_header( self, object_type, generate_object): container_name = self.create_temp_container(", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Credentials', response.headers,", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Encoding', response.headers, msg=\"Content-Encoding header was", "Create an object (version 1) object_name = self.default_obj_name ver1_info =", "header' expected = 201 received = create_response.status_code self.assertEqual( expected, received,", "created with Access-Control-Allow-Methods header' ' value expected: {0} received: {1}'.format(", "calendar import time import zlib from hashlib import md5 import", "creation with Access-Control-Allow-Methods header' expected = 201 received = response.status_code", "'\"{0}\"'.format(object_info.get('etag')) received = response.headers.get('etag') self.assertEqual( expected, received, msg='object created with", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_data = \"valid_data\" 
data_md5", "descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_object_name = '{0}_destination'.format(self.default_obj_name) dest", "generate_object(container_name, object_name) headers = {'Range': 'bytes=5-'} response = self.client.get_object( container_name,", "headers=headers) method = 'object retrieval with if match header' expected", "{'If-None-Match': object_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method =", "= 'http://example.com' received = response.headers.get('Access-Control-Allow-Origin') self.assertEqual( expected, received, msg='object created", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) headers = {'Range':", "header should reflect origin' ' expected: {0} received: {1}'.format(expected, received))", "was set\") @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning') def ddtest_versioned_container_creation_with_valid_data( self, object_type, generate_object): container_name", "response = self.client.set_object_metadata( container_name, self.default_obj_name, headers=headers) method = 'set object", "container_name, self.default_obj_name, headers=headers) method = 'partial object retrieval with start", "object_name) headers = {'If-Match': obj_info.get('etag')} response = self.client.get_object( container_name, self.default_obj_name,", "License for the specific language governing permissions and limitations under", "received = response.headers.get('Access-Control-Request-Headers') self.assertEqual( expected, received, msg='object created with Access-Control-Request-Headers", "ddtest_object_creation_with_file_compression( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name 
=", "\"\"\" container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name obj_info =", "msg=\"access-control-allow-origin header should be set\") expected = 'http://example.com' received =", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {'X-Versions-Location': object_history_container_name} self.client.set_container_metadata(container_name, headers=headers) # list", "' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_content_disposition( self,", "method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_range( self, object_type, generate_object): container_name", "X-Object-Meta-Foo header\") expected = 'Bar' received = response.headers.get('X-Object-Meta-Foo') self.assertEqual( expected,", "precondition fail with if unmodified' ' since header') expected =", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name def object_data_op(data, extra_data):", "= self.default_obj_name generate_object(container_name, object_name) response = self.client.get_object(container_name, object_name) method =", "def ddtest_object_retrieval_with_if_none_match( self, object_type, generate_object): \"\"\" Bug filed for dlo/slo", "response.headers, msg=\"Access-Control-Allow-Origin header was set\") expected = 'http://example.com' received =", "'Content-Encoding', response.headers, msg=\"Content-Encoding header was set\") expected = 'gzip' received", "md5 import unittest from cafe.drivers.unittest.decorators import ( DataDrivenFixture, data_driven_test) from", "expected=expected, received=str(received))) headers = {'If-None-Match': object_info.get('etag')} response = self.client.get_object( container_name,", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, 
received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_none_match(", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Grok', response.headers,", "headers = {'access-control-allow-origin': 'http://example.com', 'access-control-expose-headers': 'X-Trans-Id'} generate_object(container_name, object_name, headers=headers) headers", "headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'} response", "Access-Control-Allow-Origin header' ' value expected: {0} received: {1}'.format( expected, received))", "response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Origin', response.headers, msg=\"Access-Control-Allow-Origin header", "== 'standard': expected = object_info.get('etag') else: expected = '\"{0}\"'.format(object_info.get('etag')) received", "def ddtest_object_creation_with_access_control_expose_headers( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "ver2_info.get('response') method = 'update version one object' expected = 201", "container_name, object_name, headers=headers) self.assertIn( 'access-control-expose-headers', response.headers, msg=\"access-control-expose-headers header should be", "Create an object using chunked transfer encoding. 
Expected Results: Return", "206 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "= 'object retrieval with if none match header' expected =", "method = 'object creation with Access-Control-Allow-Origin header' expected = 201", "src_object_name) hdrs = {'X-Copy-From': source, 'Content-Length': '0'} response = self.client.copy_object(", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Delete-At', response.headers, msg=\"X-Delete-At header was", "header (future date)' expected = 304 received = response.status_code self.assertEqual(", "response = self.client.get_object_metadata( container_name, object_name, headers=headers) self.assertIn( 'access-control-expose-headers', response.headers, msg=\"access-control-expose-headers", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name def object_data_op(data, extra_data): data = zlib.compress(data)", "def ddtest_partial_object_retrieval_with_range( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "headers=headers) method = 'object creation with uppercase etag header' expected", "412 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected,", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Disposition', response.headers, msg=\"Content-Disposition", "import ( DataDrivenFixture, data_driven_test) from cloudcafe.objectstorage.objectstorage_api.common.constants import \\ Constants from", "self.assertEqual( expected, received, msg='object X-Object-Meta-Grok header value expected: {0}' '", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def 
ddtest_object_retrieval_with_if_none_match( self, object_type,", "= 'object creation with Access-Control-Request-Method header' expected = 201 received", "created should have content type: {0}' ' received: {1}'.format(expected, received))", "self.default_obj_name) self.assertIn( 'Access-Control-Allow-Origin', response.headers, msg=\"Access-Control-Allow-Origin header was set\") expected =", "expected = '\"{0}\"'.format(object_info.get('etag')) received = response.headers.get('etag') self.assertEqual( expected, received, msg='object", "expected=expected, received=str(received))) response = self.client.get_object( dest_container_name, dest_object_name) method = 'copied", "self.assertIn( 'etag', object_response.headers, msg=\"Etag header was set\") expected = data_md5", "generate_object(container_name, object_name) headers = {'Range': 'bytes=99-0'} response = self.client.get_object( container_name,", "value' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_delete_after(", "set\") expected = 'http://example.com' received = response.headers.get('Access-Control-Allow-Origin') self.assertEqual( expected, received,", "generate_object(container_name, object_name) response = ver1_info.get('response') method = 'object version one", "response.headers, msg=\"Access-Control-Expose-Headers header was set\") expected = 'X-Foo-Header' received =", "= 'object creation with uppercase etag header' expected = 201", "headers=headers, data=updated_object_data) method = 'object update with valid object name'", "self.default_obj_name, headers=headers) method = 'object retrieval with if none match", "object_response.headers, msg=\"Etag header was set\") expected = data_md5 received =", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_modified_since( self,", "and a single object 
should be created. \"\"\" container_name =", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_methods( self,", "expected=expected, received=str(received))) response = self.client.get_object( dest_container_name, dest_obj_name) method = 'copied", "creation with Access-Control-Request-Method header' expected = 201 received = response.status_code", "headers=headers) method = 'set object metadata X-Object-Meta-Grok: Drok' expected =", "generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name)", "should reflect origin' ' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo',", "set\") if object_type == 'standard': expected = object_info.get('etag') else: expected", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_range( self, object_type, generate_object): container_name = self.create_temp_container(", "return identical object') @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_update_with_valid_object_name( self, object_type, generate_object):", "'etag', response.headers, msg=\"Etag header was set\") if object_type == 'standard':", "expected=expected, received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Content-Disposition', response.headers,", "self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_data = \"valid_data\" data_md5 =", "= ver2_info.get('response') method = 'update version one object' expected =", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response_md5 = md5(response.content).hexdigest() 
self.assertEqual( object_info.get('md5'), response_md5,", "= 'object retrieval with if modified since header (future date)'", "ddtest_put_copy_object(self, object_type, generate_object): src_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) dest_container_name = self.create_temp_container(", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_deletion_with_valid_object( self,", "'access-control-expose-headers', response.headers, msg=\"access-control-expose-headers header should be set\") self.assertIn( 'access-control-allow-origin', response.headers,", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_etag( self, object_type, generate_object): container_name =", "'Object retrieval' expected = 200 received = object_response.status_code self.assertEqual( expected,", "= {'If-Modified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'} response =", "Access-Control-Allow-Credentials header' expected = 201 received = response.status_code self.assertEqual( expected,", "= 'delete object' expected = 204 received = response.status_code self.assertEqual(", "was set\") expected = 'attachment; filename=testdata.txt' received = response.headers.get('Content-Disposition') self.assertEqual(", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'X-Delete-After': '60'} object_info =", "object_info.get('response') method = 'object creation with X-Delete-After header' expected =", "generate_object(container_name, object_name) headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2101 18:44:42", "{'Content-Length': updated_content_length, 'Content-Type': CONTENT_TYPES.get('text')} response = self.client.create_object( container_name, self.default_obj_name, headers=headers,", "by applicable law or agreed to in writing, software distributed", "received status code 
{received}') @DataDrivenFixture class ObjectSmokeTest(ObjectStorageFixture): @classmethod def setUpClass(cls):", "response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received, msg='object created with X-Object-Meta-Grok header value'", "with Access-Control-Expose-Headers header' expected = 201 received = response.status_code self.assertEqual(", "container_name, self.default_obj_name, headers=headers) method = 'partial object retrieval with complete", "= 'object creation with X-Delete-At header' expected = 201 received", "self.assertEqual( expected, received, msg='object created with Access-Control-Expose-Headers header' ' value", "def ddtest_content_type_not_detected_without_detect_content_type_header( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_complete_range( self, object_type, generate_object): container_name =", "should be set\") self.assertIn( 'access-control-allow-origin', response.headers, msg=\"access-control-allow-origin header should be", "def ddtest_object_creation_with_delete_after( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name", "self.default_obj_name generate_object(container_name, object_name) headers = {'If-Modified-Since': 'Fri, 17 Aug 2001", "Etag header' ' value expected: {0} received: {1}'.format( expected, received))", "response = object_info.get('response') method = 'object creation with Access-Control-Allow-Credentials header'", "X-Object-Meta-Grok: Drok' expected = 202 received = response.status_code self.assertEqual( expected,", "expected = 'Bar' received = response.headers.get('X-Object-Meta-Foo') self.assertEqual( expected, received, msg='object", "created with X-Object-Meta-Grok header\") expected = 'Drok' received = 
response.headers.get('X-Object-Meta-Grok')", "' expected: {0} received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_creation_with_file_compression(", "self.client.set_object_metadata( container_name, object_name, headers=headers) method = 'set object metadata X-Object-Meta-Grok:", "object_name, headers=object_headers) response = object_info.get('response') method = 'object creation with", "container_name, self.default_obj_name, headers=headers) method = 'object retrieval with if none", "src_container_name, src_object_name, headers=headers) method = 'copy object' expected = 201", "have content type: {0}' ' received: {1}'.format(expected, received)) def test_object_creation_via_chunked_transfer(self):", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name generate_object(container_name, object_name) updated_object_data", "object_headers = { 'Access-Control-Allow-Origin': 'http://example.com'} object_info = generate_object(container_name, object_name, headers=object_headers)", "{1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_controle_max_age( self, object_type, generate_object):", "Copyright 2015 Rackspace Licensed under the Apache License, Version 2.0", "ddtest_object_retrieval_with_if_match( self, object_type, generate_object): \"\"\" Bug filed for dlo/slo support", "@data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_valid_object_name( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "object_headers = { 'Content-Disposition': 'attachment; filename=testdata.txt'} object_info = generate_object(container_name, object_name,", "200 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( 
method=method, expected=expected,", "False, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object( container_name,", "value expected: {0}' ' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_not_detected_without_detect_content_type_header(", "self.default_obj_name) self.assertIn( 'Content-Encoding', response.headers, msg=\"Content-Encoding header was set\") expected =", "msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) response = self.client.get_object( container_name, self.default_obj_name) method", "'object2.txt' object2_headers = {'X-Detect-Content-Type': False, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object2_name, headers=object2_headers)", "container_name, self.default_obj_name) method = 'object retrieval' expected = 404 received", "object_info.get('response') method = 'object creation with Access-Control-Request-Method header' expected =", "was set\") expected = future_time received = response.headers.get('X-Delete-At') self.assertEqual( expected,", "was set\") expected = 'x-requested-with' received = response.headers.get('Access-Control-Request-Headers') self.assertEqual( expected,", "descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = { 'Content-Disposition': 'attachment; filename=testdata.txt'}", "@data_driven_test(ObjectDatasetList()) def ddtest_object_deletion_with_valid_object( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object1_name = 'object1.txt' object1_headers = {'Content-Type':", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) response = self.client.get_object( 
container_name,", "with the License. You may obtain a copy of the", "= object_info.get('response') method = 'object creation with etag header' expected", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_credentials( self, object_type,", "received, msg='object created with Access-Control-Allow-Methods header' ' value expected: {0}", "method = 'object creation with Access-Control-Request-Method header' expected = 201", "expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def ddtest_object_retrieval_with_if_match(", "header was set\") @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object_versioning') def ddtest_versioned_container_creation_with_valid_data( self, object_type, generate_object):", "{1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_not_detected_without_detect_content_type_header( self, object_type, generate_object): container_name =", "= {'If-Unmodified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'} response =", "have content type: {0}' ' received: {1}'.format(expected, received)) response =", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'X-Object-Meta-Grok', response.headers, msg=\"object updated with", "received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_partial_object_retrieval_with_end_range( self, object_type,", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_origin( self,", "response = self.client.list_objects(object_history_container_name) 
method = 'list on versioned container' expected", "received=str(received))) response = self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Credentials', response.headers, msg=\"Access-Control-Allow-Credentials", "descriptor=CONTAINER_DESCRIPTOR) object_history_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) headers = {'X-Versions-Location': object_history_container_name} self.client.set_container_metadata(container_name,", "container_name, self.default_obj_name, headers=headers) method = 'object should be flagged as", "container_name, self.default_obj_name) self.assertIn( 'Access-Control-Expose-Headers', response.headers, msg=\"Access-Control-Expose-Headers header was set\") expected", "= data_md5 received = object_response.headers.get('etag') self.assertEqual( expected, received, msg='object created", "'{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_obj_name = '{0}_destination'.format(self.default_obj_name) source = '/{0}/{1}'.format(src_container_name, src_object_name)", "version one object' expected = 201 received = response.status_code self.assertEqual(", "self.assertEqual( expected, received, msg='object created with X-Delete-At header value' '", "obj_info = generate_object(container_name, object_name) headers = {'If-Match': obj_info.get('etag')} response =", "received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_etag( self, object_type, generate_object): container_name = self.create_temp_container(", "self.assertIn( 'Access-Control-Request-Method', response.headers, msg=\"Access-Control-Request-Method header was set\") expected = 'GET'", "with start range' expected = 206 received = response.status_code self.assertEqual(", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) # Create an", "response.status_code 
self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @unittest.skip('Problem with", "'x-requested-with' received = response.headers.get('Access-Control-Request-Headers') self.assertEqual( expected, received, msg='object created with", "= self.default_obj_name object_headers = { 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS'} object_info", "expected, received, msg='object created with Access-Control-Request-Headers header' ' value expected:", "ddtest_object_retrieval_with_if_unmodified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name =", "response = object_info.get('response') method = 'object creation with Access-Control-Max-Age header'", "with end range' expected = 206 received = response.status_code self.assertEqual(", "range' expected = 206 received = response.status_code self.assertEqual( expected, received,", "' received: {1}'.format(expected, received)) @data_driven_test(ObjectDatasetList()) def ddtest_content_type_not_detected_without_detect_content_type_header( self, object_type, generate_object):", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo'])) def test_object_creation_with_uppercase_etag(self): container_name =", "future_time = str(int(start_time + 60)) object_headers = {'X-Delete-At': future_time} object_info", "msg='object X-Object-Meta-Foo header value expected: {0}' ' received: {1}'.format(expected, received))", "self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_fails_with_if_unmodified_since(", "response = self.client.create_object( container_name, self.default_obj_name, headers=headers, data=updated_object_data) method = 'object", "dest_container_name, dest_obj_name, headers=hdrs) method = 
'put copy object' expected =", "@data_driven_test(ObjectDatasetList()) def ddtest_obj_metadata_update_with_object_possessing_metadata( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)", "received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_request_method( self, object_type,", "expected, received, msg='object created with Access-Control-Allow-Methods header' ' value expected:", "= self.client.get_object_metadata( container_name, self.default_obj_name) self.assertIn( 'Access-Control-Allow-Methods', response.headers, msg=\"Access-Control-Allow-Methods header was", "= 'gzip' received = response.headers.get('Content-Encoding') self.assertEqual( expected, received, msg='object created", "headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'} response", "headers=headers) method = ('object retrieval precondition fail with if unmodified'", "response.headers, msg=\"object updated with X-Object-Meta-Foo header\") expected = 'Bar' received", "complete range' expected = 200 received = response.status_code self.assertEqual( expected,", "response = ver2_info.get('response') method = 'update version one object' expected", "= 202 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "'GET, POST, OPTIONS' received = response.headers.get('Access-Control-Allow-Methods') self.assertEqual( expected, received, msg='object", "= response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method, expected=expected, received=str(received))) @unittest.skip('Problem", "dest_container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_object_name", "header\") 
expected = 'Drok' received = response.headers.get('X-Object-Meta-Grok') self.assertEqual( expected, received,", "= 'Object retrieval' expected = 200 received = object_response.status_code self.assertEqual(", "ddtest_object_retrieval_with_if_none_match( self, object_type, generate_object): \"\"\" Bug filed for dlo/slo support", "self.client.get_object( container_name, object2_name) expected = 'text/plain' received = response.headers.get('content-type') self.assertEqual(", "@data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_creation_with_access_control_allow_credentials( self, object_type, generate_object): container_name = self.create_temp_container(", "= 204 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "header was set\") expected = 'X-Foo-Header' received = response.headers.get('Access-Control-Expose-Headers') self.assertEqual(", "with if unmodified since header' expected = 200 received =", "'grok'} response = self.client.get_object( container_name, self.default_obj_name, headers=headers) method = 'object", "received)) def test_object_creation_via_chunked_transfer(self): \"\"\" Scenario: Create an object using chunked", "{'X-Detect-Content-Type': False, 'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object(", "retrieval with if unmodified since header' expected = 200 received", "= self.client.get_object( container_name, self.default_obj_name) self.assertIn( 'etag', object_response.headers, msg=\"Etag header was", "Access-Control-Allow-Credentials header' ' value expected: {0} received: {1}'.format( expected, received))", "License. 
You may obtain a copy of the License at", "descriptor=CONTAINER_DESCRIPTOR) headers = {\"Transfer-Encoding\": \"chunked\"} create_response = self.client.create_object( container_name, self.default_obj_name,", "'attachment; filename=testdata.txt'} object_info = generate_object(container_name, object_name, headers=object_headers) response = object_info.get('response')", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) src_object_name = '{0}_source'.format(self.default_obj_name) generate_object(src_container_name, src_object_name) dest_object_name =", "src_object_name) dest_obj_name = '{0}_destination'.format(self.default_obj_name) source = '/{0}/{1}'.format(src_container_name, src_object_name) hdrs =", "= self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR) object_name = self.default_obj_name object_headers = {'Access-Control-Expose-Headers': 'X-Foo-Header'}", "msg='object created with Access-Control-Max-Age header' ' value expected: {0} received:", "self.default_obj_name object_headers = {'Access-Control-Allow-Credentials': 'true'} object_info = generate_object(container_name, object_name, headers=object_headers)", "return (data, extra_data) object_headers = {'Content-Encoding': 'gzip'} object_info = generate_object(container_name,", "objects in non-current container response = self.client.list_objects( object_history_container_name) method =", "expected=expected, received=str(received))) @data_driven_test(ObjectDatasetList()) def ddtest_object_creation_with_valid_object_name( self, object_type, generate_object): container_name =", "{0} received: {1}'.format( expected, received)) @data_driven_test(ObjectDatasetList()) @ObjectStorageFixture.required_features('object-cors') def ddtest_object_retrieval_with_origin( self,", "object_headers = {'Access-Control-Request-Method': 'GET'} object_info = generate_object(container_name, object_name, headers=object_headers) response", "'X-Delete-At', response.headers, msg=\"X-Delete-At header was set\") 
expected = future_time received", "= 206 received = response.status_code self.assertEqual( expected, received, msg=STATUS_CODE_MSG.format( method=method,", "'Content-Type': 'application/x-www-form-urlencoded'} generate_object(container_name, object2_name, headers=object2_headers) response = self.client.get_object( container_name, object1_name)", "start range' expected = 206 received = response.status_code self.assertEqual( expected,", "object_info.get('etag') else: expected = '\"{0}\"'.format(object_info.get('etag')) received = response.headers.get('etag') self.assertEqual( expected,", "expected: {0} received: {1}'.format(expected, received)) headers = {'X-Object-Meta-Foo': 'Bar'} response", "@data_driven_test(ObjectDatasetList()) def ddtest_object_retrieval_with_if_modified_since( self, object_type, generate_object): container_name = self.create_temp_container( descriptor=CONTAINER_DESCRIPTOR)" ]
[ "BSD style license (see LICENSE) import os import pytest from", "os.path.join(HERE, 'compose', 'docker-compose.yaml') # We need a custom condition to", "import CheckDockerLogs from datadog_checks.dev.subprocess import run_command from .common import BASIC_CONFIG,", "'spawning ceph --cluster ceph -w', wait=5), CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'),", "bit longer with docker_run( compose_file=compose_file, conditions=[ CheckDockerLogs(compose_file, 'spawning ceph --cluster", "from .common import BASIC_CONFIG, HERE E2E_METADATA = { 'start_commands': [", ") # Wait a bit for the change to take", "dd_environment(): compose_file = os.path.join(HERE, 'compose', 'docker-compose.yaml') # We need a", "[ 'apt-get update', 'apt-get install -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" -y", "'--mon_data_avail_warn', '5'] ) # Wait a bit for the change", "datadog_checks.dev.subprocess import run_command from .common import BASIC_CONFIG, HERE E2E_METADATA =", "os import pytest from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import", "from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs from datadog_checks.dev.subprocess", "Dpkg::Options::=\"--force-confold\" -y docker.io', ], 'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'], } @pytest.fixture(scope=\"session\") def dd_environment():", "disk space warning run_command( ['docker', 'exec', 'dd-test-ceph', 'ceph', 'tell', 'mon.*',", "import docker_run from datadog_checks.dev.conditions import CheckDockerLogs from datadog_checks.dev.subprocess import run_command", "condition = CheckDockerLogs(compose_file, 'Cluster is now healthy') condition() yield BASIC_CONFIG,", "license (see LICENSE) import os import pytest from datadog_checks.dev import", "warning run_command( ['docker', 'exec', 'dd-test-ceph', 'ceph', 'tell', 'mon.*', 'injectargs', '--mon_data_avail_warn',", "from 
datadog_checks.dev.conditions import CheckDockerLogs from datadog_checks.dev.subprocess import run_command from .common", "'dd-test-ceph', 'ceph', 'tell', 'mon.*', 'injectargs', '--mon_data_avail_warn', '5'] ) # Wait", "3-clause BSD style license (see LICENSE) import os import pytest", "custom condition to wait a bit longer with docker_run( compose_file=compose_file,", "conditions=[ CheckDockerLogs(compose_file, 'spawning ceph --cluster ceph -w', wait=5), CheckDockerLogs(compose_file, 'Running", "a bit longer with docker_run( compose_file=compose_file, conditions=[ CheckDockerLogs(compose_file, 'spawning ceph", "# Licensed under a 3-clause BSD style license (see LICENSE)", "datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs from datadog_checks.dev.subprocess import", "'apt-get update', 'apt-get install -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" -y docker.io',", "], 'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'], } @pytest.fixture(scope=\"session\") def dd_environment(): compose_file = os.path.join(HERE,", "HERE E2E_METADATA = { 'start_commands': [ 'apt-get update', 'apt-get install", "Datadog, Inc. 2018-present # All rights reserved # Licensed under", "style license (see LICENSE) import os import pytest from datadog_checks.dev", "bit for the change to take effect condition = CheckDockerLogs(compose_file,", "http://0.0.0.0:5000/'), ], ): # Clean the disk space warning run_command(", "# (C) Datadog, Inc. 
2018-present # All rights reserved #", "datadog_checks.dev.conditions import CheckDockerLogs from datadog_checks.dev.subprocess import run_command from .common import", "wait a bit longer with docker_run( compose_file=compose_file, conditions=[ CheckDockerLogs(compose_file, 'spawning", "on http://0.0.0.0:5000/'), ], ): # Clean the disk space warning", "} @pytest.fixture(scope=\"session\") def dd_environment(): compose_file = os.path.join(HERE, 'compose', 'docker-compose.yaml') #", "docker_run( compose_file=compose_file, conditions=[ CheckDockerLogs(compose_file, 'spawning ceph --cluster ceph -w', wait=5),", "], ): # Clean the disk space warning run_command( ['docker',", "We need a custom condition to wait a bit longer", "the change to take effect condition = CheckDockerLogs(compose_file, 'Cluster is", "run_command from .common import BASIC_CONFIG, HERE E2E_METADATA = { 'start_commands':", "a 3-clause BSD style license (see LICENSE) import os import", "a bit for the change to take effect condition =", "'mon.*', 'injectargs', '--mon_data_avail_warn', '5'] ) # Wait a bit for", "# Wait a bit for the change to take effect", "All rights reserved # Licensed under a 3-clause BSD style", "'apt-get install -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" -y docker.io', ], 'docker_volumes':", "longer with docker_run( compose_file=compose_file, conditions=[ CheckDockerLogs(compose_file, 'spawning ceph --cluster ceph", "CheckDockerLogs(compose_file, 'spawning ceph --cluster ceph -w', wait=5), CheckDockerLogs(compose_file, 'Running on", "install -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" -y docker.io', ], 'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'],", "compose_file = os.path.join(HERE, 'compose', 'docker-compose.yaml') # We need a custom", "2018-present # All rights reserved # Licensed under a 3-clause", "under a 3-clause BSD style license (see LICENSE) import os", "# Clean the disk space warning 
run_command( ['docker', 'exec', 'dd-test-ceph',", "import pytest from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs", "(see LICENSE) import os import pytest from datadog_checks.dev import docker_run", "Wait a bit for the change to take effect condition", "ceph -w', wait=5), CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'), ], ): #", "E2E_METADATA = { 'start_commands': [ 'apt-get update', 'apt-get install -o", "'docker-compose.yaml') # We need a custom condition to wait a", "= { 'start_commands': [ 'apt-get update', 'apt-get install -o Dpkg::Options::=\"--force-confdef\"", "space warning run_command( ['docker', 'exec', 'dd-test-ceph', 'ceph', 'tell', 'mon.*', 'injectargs',", "def dd_environment(): compose_file = os.path.join(HERE, 'compose', 'docker-compose.yaml') # We need", "'compose', 'docker-compose.yaml') # We need a custom condition to wait", "Licensed under a 3-clause BSD style license (see LICENSE) import", "Clean the disk space warning run_command( ['docker', 'exec', 'dd-test-ceph', 'ceph',", "docker_run from datadog_checks.dev.conditions import CheckDockerLogs from datadog_checks.dev.subprocess import run_command from", "-o Dpkg::Options::=\"--force-confold\" -y docker.io', ], 'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'], } @pytest.fixture(scope=\"session\") def", "rights reserved # Licensed under a 3-clause BSD style license", "run_command( ['docker', 'exec', 'dd-test-ceph', 'ceph', 'tell', 'mon.*', 'injectargs', '--mon_data_avail_warn', '5']", "with docker_run( compose_file=compose_file, conditions=[ CheckDockerLogs(compose_file, 'spawning ceph --cluster ceph -w',", "to wait a bit longer with docker_run( compose_file=compose_file, conditions=[ CheckDockerLogs(compose_file,", "for the change to take effect condition = CheckDockerLogs(compose_file, 'Cluster", "to take effect condition = CheckDockerLogs(compose_file, 'Cluster is now healthy')", "ceph --cluster ceph -w', 
wait=5), CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'), ],", "-o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" -y docker.io', ], 'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'], }", "a custom condition to wait a bit longer with docker_run(", "Inc. 2018-present # All rights reserved # Licensed under a", "effect condition = CheckDockerLogs(compose_file, 'Cluster is now healthy') condition() yield", "BASIC_CONFIG, HERE E2E_METADATA = { 'start_commands': [ 'apt-get update', 'apt-get", "['/var/run/docker.sock:/var/run/docker.sock'], } @pytest.fixture(scope=\"session\") def dd_environment(): compose_file = os.path.join(HERE, 'compose', 'docker-compose.yaml')", "= CheckDockerLogs(compose_file, 'Cluster is now healthy') condition() yield BASIC_CONFIG, E2E_METADATA", "# All rights reserved # Licensed under a 3-clause BSD", "): # Clean the disk space warning run_command( ['docker', 'exec',", "{ 'start_commands': [ 'apt-get update', 'apt-get install -o Dpkg::Options::=\"--force-confdef\" -o", "(C) Datadog, Inc. 
2018-present # All rights reserved # Licensed", "import os import pytest from datadog_checks.dev import docker_run from datadog_checks.dev.conditions", "'5'] ) # Wait a bit for the change to", "-w', wait=5), CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'), ], ): # Clean", "condition to wait a bit longer with docker_run( compose_file=compose_file, conditions=[", "--cluster ceph -w', wait=5), CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'), ], ):", ".common import BASIC_CONFIG, HERE E2E_METADATA = { 'start_commands': [ 'apt-get", "docker.io', ], 'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'], } @pytest.fixture(scope=\"session\") def dd_environment(): compose_file =", "@pytest.fixture(scope=\"session\") def dd_environment(): compose_file = os.path.join(HERE, 'compose', 'docker-compose.yaml') # We", "import BASIC_CONFIG, HERE E2E_METADATA = { 'start_commands': [ 'apt-get update',", "need a custom condition to wait a bit longer with", "'tell', 'mon.*', 'injectargs', '--mon_data_avail_warn', '5'] ) # Wait a bit", "change to take effect condition = CheckDockerLogs(compose_file, 'Cluster is now", "'ceph', 'tell', 'mon.*', 'injectargs', '--mon_data_avail_warn', '5'] ) # Wait a", "'injectargs', '--mon_data_avail_warn', '5'] ) # Wait a bit for the", "-y docker.io', ], 'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'], } @pytest.fixture(scope=\"session\") def dd_environment(): compose_file", "compose_file=compose_file, conditions=[ CheckDockerLogs(compose_file, 'spawning ceph --cluster ceph -w', wait=5), CheckDockerLogs(compose_file,", "<reponame>remicalixte/integrations-core # (C) Datadog, Inc. 
2018-present # All rights reserved", "'start_commands': [ 'apt-get update', 'apt-get install -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\"", "wait=5), CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'), ], ): # Clean the", "'exec', 'dd-test-ceph', 'ceph', 'tell', 'mon.*', 'injectargs', '--mon_data_avail_warn', '5'] ) #", "LICENSE) import os import pytest from datadog_checks.dev import docker_run from", "['docker', 'exec', 'dd-test-ceph', 'ceph', 'tell', 'mon.*', 'injectargs', '--mon_data_avail_warn', '5'] )", "reserved # Licensed under a 3-clause BSD style license (see", "the disk space warning run_command( ['docker', 'exec', 'dd-test-ceph', 'ceph', 'tell',", "take effect condition = CheckDockerLogs(compose_file, 'Cluster is now healthy') condition()", "CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'), ], ): # Clean the disk", "'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'], } @pytest.fixture(scope=\"session\") def dd_environment(): compose_file = os.path.join(HERE, 'compose',", "# We need a custom condition to wait a bit", "'Running on http://0.0.0.0:5000/'), ], ): # Clean the disk space", "from datadog_checks.dev.subprocess import run_command from .common import BASIC_CONFIG, HERE E2E_METADATA", "pytest from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs from", "update', 'apt-get install -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" -y docker.io', ],", "Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" -y docker.io', ], 'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'], } @pytest.fixture(scope=\"session\")", "import run_command from .common import BASIC_CONFIG, HERE E2E_METADATA = {", "= os.path.join(HERE, 'compose', 'docker-compose.yaml') # We need a custom condition", "CheckDockerLogs from datadog_checks.dev.subprocess import run_command from .common import 
BASIC_CONFIG, HERE" ]
[ "old Hubzilla versions url = doc.get('0', {}).get('href') elif isinstance(doc.get('links'), dict):", "json.JSONDecodeError: return if response.status_code == 200: return parse_misskey_document(doc, host, mastodon_document=mastodon_document)", "isinstance(doc.get('links'), dict): # Another buggy NodeInfo from certain old Hubzilla", "highest_version < version <= HIGHEST_SUPPORTED_NODEINFO_VERSION: url, highest_version = link.get('href'), version", "path='/statistics.json') if not doc: return try: doc = json.loads(doc) except", "json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo_document(doc, host) def fetch_nodeinfo2_document(host): doc,", "parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document, parse_matrix_document, parse_misskey_document) from federation.utils.network import fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION", "doc = json.loads(doc) except json.JSONDecodeError: return return parse_mastodon_document(doc, host) def", "except json.JSONDecodeError: return if response.status_code == 200: return parse_misskey_document(doc, host,", "def fetch_mastodon_document(host): doc, status_code, error = fetch_document(host=host, path='/api/v1/instance') if not", "<= HIGHEST_SUPPORTED_NODEINFO_VERSION: url, highest_version = link.get('href'), version if not url:", "fetch_matrix_document(host: str) -> Optional[Dict]: doc, status_code, error = fetch_document(host=host, path='/_matrix/federation/v1/version')", "def fetch_statisticsjson_document(host): doc, status_code, error = fetch_document(host=host, path='/statistics.json') if not", "def fetch_nodeinfo2_document(host): doc, status_code, error = fetch_document(host=host, path='/.well-known/x-nodeinfo2') if not", "return return parse_nodeinfo_document(doc, host) def fetch_nodeinfo2_document(host): doc, status_code, error =", "try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_mastodon_document(doc, host)", "= fetch_document(host=host, 
path='/api/v1/instance') if not doc: return try: doc =", "== 200: return parse_misskey_document(doc, host, mastodon_document=mastodon_document) def fetch_nodeinfo_document(host): doc, status_code,", "= json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo2_document(doc, host) def fetch_statisticsjson_document(host):", "Hubzilla versions url = doc.get('0', {}).get('href') elif isinstance(doc.get('links'), dict): #", "versions url = doc.get('links').get('href') else: for link in doc.get('links'): version", "from typing import Dict, Optional import requests from federation.hostmeta.parsers import", "= fetch_document(host=host, path='/_matrix/federation/v1/version') if not doc: return try: doc =", "return try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_matrix_document(doc,", "# Buggy NodeInfo from certain old Hubzilla versions url =", "fetch_document(host=host, path='/.well-known/nodeinfo') if not doc: return try: doc = json.loads(doc)", "doc, status_code, error = fetch_document(host=host, path='/api/v1/instance') if not doc: return", "except json.JSONDecodeError: return url, highest_version = '', 0.0 if doc.get('0'):", "200: return parse_misskey_document(doc, host, mastodon_document=mastodon_document) def fetch_nodeinfo_document(host): doc, status_code, error", "link in doc.get('links'): version = float(link.get('rel').split('/')[-1]) if highest_version < version", "host) def fetch_matrix_document(host: str) -> Optional[Dict]: doc, status_code, error =", "# ¯\\_(ツ)_/¯ except Exception: return try: doc = response.json() except", "doc.get('links').get('href') else: for link in doc.get('links'): version = float(link.get('rel').split('/')[-1]) if", "path='/api/v1/instance') if not doc: return try: doc = json.loads(doc) except", "def fetch_misskey_document(host: str, mastodon_document: Dict=None) -> Optional[Dict]: try: response =", "status_code, error = fetch_document(host=host, path='/api/v1/instance') if not doc: return 
try:", "= float(link.get('rel').split('/')[-1]) if highest_version < version <= HIGHEST_SUPPORTED_NODEINFO_VERSION: url, highest_version", "requests from federation.hostmeta.parsers import ( parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document, parse_matrix_document,", "version <= HIGHEST_SUPPORTED_NODEINFO_VERSION: url, highest_version = link.get('href'), version if not", "parse_misskey_document(doc, host, mastodon_document=mastodon_document) def fetch_nodeinfo_document(host): doc, status_code, error = fetch_document(host=host,", "= doc.get('0', {}).get('href') elif isinstance(doc.get('links'), dict): # Another buggy NodeInfo", "Another buggy NodeInfo from certain old Hubzilla versions url =", "= fetch_document(host=host, path='/.well-known/x-nodeinfo2') if not doc: return try: doc =", "Exception: return try: doc = response.json() except json.JSONDecodeError: return if", "'', 0.0 if doc.get('0'): # Buggy NodeInfo from certain old", "Buggy NodeInfo from certain old Hubzilla versions url = doc.get('0',", "except json.JSONDecodeError: return return parse_nodeinfo_document(doc, host) def fetch_nodeinfo2_document(host): doc, status_code,", "except json.JSONDecodeError: return return parse_mastodon_document(doc, host) def fetch_matrix_document(host: str) ->", "not url: return doc, status_code, error = fetch_document(url=url) if not", "= 2.1 def fetch_mastodon_document(host): doc, status_code, error = fetch_document(host=host, path='/api/v1/instance')", "if not doc: return try: doc = json.loads(doc) except json.JSONDecodeError:", "dict): # Another buggy NodeInfo from certain old Hubzilla versions", "try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo2_document(doc, host)", "typing import Dict, Optional import requests from federation.hostmeta.parsers import (", "doc.get('links'): version = float(link.get('rel').split('/')[-1]) if highest_version < version <= 
HIGHEST_SUPPORTED_NODEINFO_VERSION:", "buggy NodeInfo from certain old Hubzilla versions url = doc.get('links').get('href')", "doc = json.loads(doc) except json.JSONDecodeError: return url, highest_version = '',", "return return parse_nodeinfo2_document(doc, host) def fetch_statisticsjson_document(host): doc, status_code, error =", "( parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document, parse_matrix_document, parse_misskey_document) from federation.utils.network import", "response.status_code == 200: return parse_misskey_document(doc, host, mastodon_document=mastodon_document) def fetch_nodeinfo_document(host): doc,", "fetch_document(url=url) if not doc: return try: doc = json.loads(doc) except", "parse_statisticsjson_document, parse_mastodon_document, parse_matrix_document, parse_misskey_document) from federation.utils.network import fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION =", "doc, status_code, error = fetch_document(url=url) if not doc: return try:", "parse_misskey_document) from federation.utils.network import fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1 def fetch_mastodon_document(host):", "return return parse_mastodon_document(doc, host) def fetch_matrix_document(host: str) -> Optional[Dict]: doc,", "= fetch_document(host=host, path='/statistics.json') if not doc: return try: doc =", "= requests.post(f'https://{host}/api/meta') # ¯\\_(ツ)_/¯ except Exception: return try: doc =", "-> Optional[Dict]: doc, status_code, error = fetch_document(host=host, path='/_matrix/federation/v1/version') if not", "from federation.hostmeta.parsers import ( parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document, parse_matrix_document, parse_misskey_document)", "highest_version = '', 0.0 if doc.get('0'): # Buggy NodeInfo from", "not doc: return try: doc = json.loads(doc) except json.JSONDecodeError: return", "host) def fetch_nodeinfo2_document(host): doc, 
status_code, error = fetch_document(host=host, path='/.well-known/x-nodeinfo2') if", "mastodon_document=mastodon_document) def fetch_nodeinfo_document(host): doc, status_code, error = fetch_document(host=host, path='/.well-known/nodeinfo') if", "json.loads(doc) except json.JSONDecodeError: return url, highest_version = '', 0.0 if", "return parse_matrix_document(doc, host) def fetch_misskey_document(host: str, mastodon_document: Dict=None) -> Optional[Dict]:", "mastodon_document: Dict=None) -> Optional[Dict]: try: response = requests.post(f'https://{host}/api/meta') # ¯\\_(ツ)_/¯", "federation.hostmeta.parsers import ( parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document, parse_matrix_document, parse_misskey_document) from", "return try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo_document(doc,", "status_code, error = fetch_document(host=host, path='/statistics.json') if not doc: return try:", "except Exception: return try: doc = response.json() except json.JSONDecodeError: return", "= json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo_document(doc, host) def fetch_nodeinfo2_document(host):", "def fetch_nodeinfo_document(host): doc, status_code, error = fetch_document(host=host, path='/.well-known/nodeinfo') if not", "import fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1 def fetch_mastodon_document(host): doc, status_code, error", "in doc.get('links'): version = float(link.get('rel').split('/')[-1]) if highest_version < version <=", "return doc, status_code, error = fetch_document(url=url) if not doc: return", "fetch_document(host=host, path='/.well-known/x-nodeinfo2') if not doc: return try: doc = json.loads(doc)", "doc, status_code, error = fetch_document(host=host, path='/.well-known/nodeinfo') if not doc: return", "json.JSONDecodeError: return url, highest_version = '', 0.0 if doc.get('0'): #", "url = doc.get('0', {}).get('href') elif 
isinstance(doc.get('links'), dict): # Another buggy", "fetch_document(host=host, path='/api/v1/instance') if not doc: return try: doc = json.loads(doc)", "try: response = requests.post(f'https://{host}/api/meta') # ¯\\_(ツ)_/¯ except Exception: return try:", "doc: return try: doc = json.loads(doc) except json.JSONDecodeError: return url,", "json.JSONDecodeError: return return parse_mastodon_document(doc, host) def fetch_matrix_document(host: str) -> Optional[Dict]:", "url, highest_version = link.get('href'), version if not url: return doc,", "parse_nodeinfo_document(doc, host) def fetch_nodeinfo2_document(host): doc, status_code, error = fetch_document(host=host, path='/.well-known/x-nodeinfo2')", "error = fetch_document(host=host, path='/.well-known/x-nodeinfo2') if not doc: return try: doc", "json.JSONDecodeError: return return parse_nodeinfo2_document(doc, host) def fetch_statisticsjson_document(host): doc, status_code, error", "fetch_mastodon_document(host): doc, status_code, error = fetch_document(host=host, path='/api/v1/instance') if not doc:", "doc = json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo2_document(doc, host) def", "doc = json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo_document(doc, host) def", "try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo_document(doc, host)", "try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_statisticsjson_document(doc, host)", "try: doc = response.json() except json.JSONDecodeError: return if response.status_code ==", "if not url: return doc, status_code, error = fetch_document(url=url) if", "json.JSONDecodeError: return return parse_nodeinfo_document(doc, host) def fetch_nodeinfo2_document(host): doc, status_code, error", "return parse_nodeinfo2_document(doc, host) def fetch_statisticsjson_document(host): doc, status_code, error = fetch_document(host=host,", "doc = json.loads(doc) except json.JSONDecodeError: 
return return parse_matrix_document(doc, host) def", "doc, status_code, error = fetch_document(host=host, path='/statistics.json') if not doc: return", "json from typing import Dict, Optional import requests from federation.hostmeta.parsers", "status_code, error = fetch_document(url=url) if not doc: return try: doc", "import ( parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document, parse_matrix_document, parse_misskey_document) from federation.utils.network", "path='/.well-known/x-nodeinfo2') if not doc: return try: doc = json.loads(doc) except", "version if not url: return doc, status_code, error = fetch_document(url=url)", "for link in doc.get('links'): version = float(link.get('rel').split('/')[-1]) if highest_version <", "error = fetch_document(url=url) if not doc: return try: doc =", "return try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo2_document(doc,", "status_code, error = fetch_document(host=host, path='/.well-known/x-nodeinfo2') if not doc: return try:", "doc.get('0'): # Buggy NodeInfo from certain old Hubzilla versions url", "= fetch_document(host=host, path='/.well-known/nodeinfo') if not doc: return try: doc =", "return if response.status_code == 200: return parse_misskey_document(doc, host, mastodon_document=mastodon_document) def", "return parse_nodeinfo_document(doc, host) def fetch_nodeinfo2_document(host): doc, status_code, error = fetch_document(host=host,", "doc.get('0', {}).get('href') elif isinstance(doc.get('links'), dict): # Another buggy NodeInfo from", "= json.loads(doc) except json.JSONDecodeError: return return parse_mastodon_document(doc, host) def fetch_matrix_document(host:", "return try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_statisticsjson_document(doc,", "def fetch_matrix_document(host: str) -> Optional[Dict]: doc, status_code, error = fetch_document(host=host,", "else: for link in doc.get('links'): version = 
float(link.get('rel').split('/')[-1]) if highest_version", "if highest_version < version <= HIGHEST_SUPPORTED_NODEINFO_VERSION: url, highest_version = link.get('href'),", "certain old Hubzilla versions url = doc.get('0', {}).get('href') elif isinstance(doc.get('links'),", "json.loads(doc) except json.JSONDecodeError: return return parse_nodeinfo2_document(doc, host) def fetch_statisticsjson_document(host): doc,", "error = fetch_document(host=host, path='/statistics.json') if not doc: return try: doc", "highest_version = link.get('href'), version if not url: return doc, status_code,", "= fetch_document(url=url) if not doc: return try: doc = json.loads(doc)", "HIGHEST_SUPPORTED_NODEINFO_VERSION: url, highest_version = link.get('href'), version if not url: return", "path='/_matrix/federation/v1/version') if not doc: return try: doc = json.loads(doc) except", "try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_matrix_document(doc, host)", "host) def fetch_statisticsjson_document(host): doc, status_code, error = fetch_document(host=host, path='/statistics.json') if", "fetch_nodeinfo_document(host): doc, status_code, error = fetch_document(host=host, path='/.well-known/nodeinfo') if not doc:", "url: return doc, status_code, error = fetch_document(url=url) if not doc:", "parse_nodeinfo2_document(doc, host) def fetch_statisticsjson_document(host): doc, status_code, error = fetch_document(host=host, path='/statistics.json')", "float(link.get('rel').split('/')[-1]) if highest_version < version <= HIGHEST_SUPPORTED_NODEINFO_VERSION: url, highest_version =", "federation.utils.network import fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1 def fetch_mastodon_document(host): doc, status_code,", "fetch_document(host=host, path='/statistics.json') if not doc: return try: doc = json.loads(doc)", "{}).get('href') elif isinstance(doc.get('links'), dict): # Another buggy NodeInfo from certain", "-> Optional[Dict]: try: response = 
requests.post(f'https://{host}/api/meta') # ¯\\_(ツ)_/¯ except Exception:", "error = fetch_document(host=host, path='/_matrix/federation/v1/version') if not doc: return try: doc", "Optional[Dict]: doc, status_code, error = fetch_document(host=host, path='/_matrix/federation/v1/version') if not doc:", "response = requests.post(f'https://{host}/api/meta') # ¯\\_(ツ)_/¯ except Exception: return try: doc", "Optional import requests from federation.hostmeta.parsers import ( parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document,", "2.1 def fetch_mastodon_document(host): doc, status_code, error = fetch_document(host=host, path='/api/v1/instance') if", "= json.loads(doc) except json.JSONDecodeError: return url, highest_version = '', 0.0", "parse_matrix_document(doc, host) def fetch_misskey_document(host: str, mastodon_document: Dict=None) -> Optional[Dict]: try:", "import Dict, Optional import requests from federation.hostmeta.parsers import ( parse_nodeinfo_document,", "= response.json() except json.JSONDecodeError: return if response.status_code == 200: return", "versions url = doc.get('0', {}).get('href') elif isinstance(doc.get('links'), dict): # Another", "return parse_misskey_document(doc, host, mastodon_document=mastodon_document) def fetch_nodeinfo_document(host): doc, status_code, error =", "0.0 if doc.get('0'): # Buggy NodeInfo from certain old Hubzilla", "link.get('href'), version if not url: return doc, status_code, error =", "= json.loads(doc) except json.JSONDecodeError: return return parse_matrix_document(doc, host) def fetch_misskey_document(host:", "status_code, error = fetch_document(host=host, path='/_matrix/federation/v1/version') if not doc: return try:", "NodeInfo from certain old Hubzilla versions url = doc.get('links').get('href') else:", "certain old Hubzilla versions url = doc.get('links').get('href') else: for link", "= doc.get('links').get('href') else: for link in doc.get('links'): version = 
float(link.get('rel').split('/')[-1])", "url, highest_version = '', 0.0 if doc.get('0'): # Buggy NodeInfo", "json.loads(doc) except json.JSONDecodeError: return return parse_matrix_document(doc, host) def fetch_misskey_document(host: str,", "elif isinstance(doc.get('links'), dict): # Another buggy NodeInfo from certain old", "HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1 def fetch_mastodon_document(host): doc, status_code, error = fetch_document(host=host,", "except json.JSONDecodeError: return return parse_matrix_document(doc, host) def fetch_misskey_document(host: str, mastodon_document:", "Dict, Optional import requests from federation.hostmeta.parsers import ( parse_nodeinfo_document, parse_nodeinfo2_document,", "host) def fetch_misskey_document(host: str, mastodon_document: Dict=None) -> Optional[Dict]: try: response", "doc, status_code, error = fetch_document(host=host, path='/.well-known/x-nodeinfo2') if not doc: return", "Dict=None) -> Optional[Dict]: try: response = requests.post(f'https://{host}/api/meta') # ¯\\_(ツ)_/¯ except", "return return parse_matrix_document(doc, host) def fetch_misskey_document(host: str, mastodon_document: Dict=None) ->", "NodeInfo from certain old Hubzilla versions url = doc.get('0', {}).get('href')", "return url, highest_version = '', 0.0 if doc.get('0'): # Buggy", "import json from typing import Dict, Optional import requests from", "except json.JSONDecodeError: return return parse_nodeinfo2_document(doc, host) def fetch_statisticsjson_document(host): doc, status_code,", "requests.post(f'https://{host}/api/meta') # ¯\\_(ツ)_/¯ except Exception: return try: doc = response.json()", "import requests from federation.hostmeta.parsers import ( parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document,", "parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document, parse_matrix_document, parse_misskey_document) from federation.utils.network import 
fetch_document", "doc: return try: doc = json.loads(doc) except json.JSONDecodeError: return return", "return try: doc = json.loads(doc) except json.JSONDecodeError: return return parse_mastodon_document(doc,", "if response.status_code == 200: return parse_misskey_document(doc, host, mastodon_document=mastodon_document) def fetch_nodeinfo_document(host):", "return try: doc = json.loads(doc) except json.JSONDecodeError: return url, highest_version", "status_code, error = fetch_document(host=host, path='/.well-known/nodeinfo') if not doc: return try:", "doc, status_code, error = fetch_document(host=host, path='/_matrix/federation/v1/version') if not doc: return", "response.json() except json.JSONDecodeError: return if response.status_code == 200: return parse_misskey_document(doc,", "doc = response.json() except json.JSONDecodeError: return if response.status_code == 200:", "host, mastodon_document=mastodon_document) def fetch_nodeinfo_document(host): doc, status_code, error = fetch_document(host=host, path='/.well-known/nodeinfo')", "error = fetch_document(host=host, path='/api/v1/instance') if not doc: return try: doc", "< version <= HIGHEST_SUPPORTED_NODEINFO_VERSION: url, highest_version = link.get('href'), version if", "from federation.utils.network import fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1 def fetch_mastodon_document(host): doc,", "= link.get('href'), version if not url: return doc, status_code, error", "return try: doc = response.json() except json.JSONDecodeError: return if response.status_code", "fetch_nodeinfo2_document(host): doc, status_code, error = fetch_document(host=host, path='/.well-known/x-nodeinfo2') if not doc:", "fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1 def fetch_mastodon_document(host): doc, status_code, error =", "parse_mastodon_document(doc, host) def fetch_matrix_document(host: str) -> Optional[Dict]: doc, status_code, error", "fetch_statisticsjson_document(host): doc, status_code, error = 
fetch_document(host=host, path='/statistics.json') if not doc:", "return parse_mastodon_document(doc, host) def fetch_matrix_document(host: str) -> Optional[Dict]: doc, status_code,", "str) -> Optional[Dict]: doc, status_code, error = fetch_document(host=host, path='/_matrix/federation/v1/version') if", "Optional[Dict]: try: response = requests.post(f'https://{host}/api/meta') # ¯\\_(ツ)_/¯ except Exception: return", "= '', 0.0 if doc.get('0'): # Buggy NodeInfo from certain", "str, mastodon_document: Dict=None) -> Optional[Dict]: try: response = requests.post(f'https://{host}/api/meta') #", "error = fetch_document(host=host, path='/.well-known/nodeinfo') if not doc: return try: doc", "from certain old Hubzilla versions url = doc.get('links').get('href') else: for", "json.loads(doc) except json.JSONDecodeError: return return parse_mastodon_document(doc, host) def fetch_matrix_document(host: str)", "parse_mastodon_document, parse_matrix_document, parse_misskey_document) from federation.utils.network import fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1", "fetch_document(host=host, path='/_matrix/federation/v1/version') if not doc: return try: doc = json.loads(doc)", "try: doc = json.loads(doc) except json.JSONDecodeError: return url, highest_version =", "parse_matrix_document, parse_misskey_document) from federation.utils.network import fetch_document HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1 def", "Hubzilla versions url = doc.get('links').get('href') else: for link in doc.get('links'):", "¯\\_(ツ)_/¯ except Exception: return try: doc = response.json() except json.JSONDecodeError:", "if doc.get('0'): # Buggy NodeInfo from certain old Hubzilla versions", "url = doc.get('links').get('href') else: for link in doc.get('links'): version =", "old Hubzilla versions url = doc.get('links').get('href') else: for link in", "# Another buggy NodeInfo from certain old Hubzilla versions url", "version = float(link.get('rel').split('/')[-1]) if highest_version < version <= 
HIGHEST_SUPPORTED_NODEINFO_VERSION: url,", "path='/.well-known/nodeinfo') if not doc: return try: doc = json.loads(doc) except", "fetch_misskey_document(host: str, mastodon_document: Dict=None) -> Optional[Dict]: try: response = requests.post(f'https://{host}/api/meta')", "json.JSONDecodeError: return return parse_matrix_document(doc, host) def fetch_misskey_document(host: str, mastodon_document: Dict=None)", "from certain old Hubzilla versions url = doc.get('0', {}).get('href') elif" ]
[ "parser = argparse.ArgumentParser() parser.add_argument( \"--data_dir\", default=\"\", type=str, help=\"Directory where the", "_std = round(np.std(t),2) _min = round(np.min(t),2) _max = round(np.max(t),2) _q25", "_q75 = round(np.quantile(t, 0.75),2) print (f'Feature: {l}') print ('\\tmean:{} |", "default='ekf', type=str, help=\"Filter to process the data noise. (ekf/none/ekf-savgol/savgol\", choices=['ekf',", "'mean_acc', 'mean_deac', 'std_jy'] for i, l in zip(range(0, traj.shape[1]), labels):", "os.path.exists(os.path.join(args.data_dir, data_file)),\\ f'[Analysis][main][ERROR] data_file not found!({data_file})' print ('[Analysis] loading dataset....')", "i, l in zip(range(0, traj.shape[1]), labels): t = traj[:, i]", "_max, _q25, _q50, _q75)) if __name__== '__main__': #_filters = ['none',", "found!({args.data_dir})' data_file = 'features_{}_{}s_{}.npy'.format(args.mode, args.obs_len, args.filter) assert os.path.exists(os.path.join(args.data_dir, data_file)),\\ f'[Analysis][main][ERROR]", "min max #quantile : 0.25, 0.5, 0.75 labels = ['mean_v',", "q50:{} | q75:{}'.format(_mean, _std, _min, _max, _q25, _q50, _q75)) if", "'val', 'test', 'sample'] #_obs_len = [2,5] #seg = _obs_len[0] #mode", "round(np.max(t),2) _q25 = round(np.quantile(t, 0.25),2) _q50 = round(np.quantile(t, 0.5),2) _q75", "import Any, Dict, List, Tuple, NoReturn import argparse import os", "(m, 4) # [mean_v, mean_acc, mean_deac, std_jy] data = np.load(os.path.join(args.data_dir,data_file))", "the data noise. 
(ekf/none/ekf-savgol/savgol\", choices=['ekf', 'none', 'ekf-savgol', 'savgol']) return parser.parse_args()", "= round(np.quantile(t, 0.5),2) _q75 = round(np.quantile(t, 0.75),2) print (f'Feature: {l}')", "_std, _min, _max, _q25, _q50, _q75)) if __name__== '__main__': #_filters", "type=str, help=\"train/val/test/sample\", choices=['train', 'test', 'val','sample']) parser.add_argument(\"--obs_len\", default=2, type=int, help=\"Observed length", "'none', 'ekf-savgol', 'savgol']) return parser.parse_args() def stats(traj:np.ndarray) -> NoReturn: #central", "(ekf/none/ekf-savgol/savgol\", choices=['ekf', 'none', 'ekf-savgol', 'savgol']) return parser.parse_args() def stats(traj:np.ndarray) ->", "required=True, type=str, help=\"train/val/test/sample\", choices=['train', 'test', 'val','sample']) parser.add_argument(\"--obs_len\", default=2, type=int, help=\"Observed", "-> NoReturn: #central tendency : mean #dispersion : std #bounds", "#filter_name = _filters[0] args = parse_arguments() if args.mode == 'test':", "= round(np.min(t),2) _max = round(np.max(t),2) _q25 = round(np.quantile(t, 0.25),2) _q50", "'ekf-savgol', 'savgol']) return parser.parse_args() def stats(traj:np.ndarray) -> NoReturn: #central tendency", "mode:{} | filter:{} | obs_len:{}'.format(args.mode, args.filter, args.obs_len)) print ('[Analysis] data", "saved\", ) parser.add_argument(\"--mode\", required=True, type=str, help=\"train/val/test/sample\", choices=['train', 'test', 'val','sample']) parser.add_argument(\"--obs_len\",", "line arguments.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"--data_dir\", default=\"\", type=str, help=\"Directory", "found!({data_file})' print ('[Analysis] loading dataset....') # (m, 4) # [mean_v,", "= ['none', 'ekf', 'savgol', 'ekf-savgol'] #_modes = ['train', 'val', 'test',", "files) are saved\", ) parser.add_argument(\"--mode\", required=True, type=str, help=\"train/val/test/sample\", choices=['train', 'test',", "= round(np.max(t),2) _q25 = 
round(np.quantile(t, 0.25),2) _q50 = round(np.quantile(t, 0.5),2)", "help=\"Observed length of the trajectory in seconds\", choices=[1,2,3,4,5]) parser.add_argument(\"--filter\", default='ekf',", "type=str, help=\"Filter to process the data noise. (ekf/none/ekf-savgol/savgol\", choices=['ekf', 'none',", "#seg = _obs_len[0] #mode = _modes[3] #filter_name = _filters[0] args", "= round(np.quantile(t, 0.75),2) print (f'Feature: {l}') print ('\\tmean:{} | std:{}", "= _modes[3] #filter_name = _filters[0] args = parse_arguments() if args.mode", "'__main__': #_filters = ['none', 'ekf', 'savgol', 'ekf-savgol'] #_modes = ['train',", "in zip(range(0, traj.shape[1]), labels): t = traj[:, i] _mean =", "_q25 = round(np.quantile(t, 0.25),2) _q50 = round(np.quantile(t, 0.5),2) _q75 =", "= _filters[0] args = parse_arguments() if args.mode == 'test': args.obs_len", "data_file not found!({data_file})' print ('[Analysis] loading dataset....') # (m, 4)", "_filters[0] args = parse_arguments() if args.mode == 'test': args.obs_len =", "Any: \"\"\"Parse command line arguments.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"--data_dir\",", "assert os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR] data_dir not found!({args.data_dir})' data_file = 'features_{}_{}s_{}.npy'.format(args.mode, args.obs_len,", "'test', 'sample'] #_obs_len = [2,5] #seg = _obs_len[0] #mode =", "#central tendency : mean #dispersion : std #bounds : min", "help=\"train/val/test/sample\", choices=['train', 'test', 'val','sample']) parser.add_argument(\"--obs_len\", default=2, type=int, help=\"Observed length of", "<gh_stars>1-10 import numpy as np from typing import Any, Dict,", "default=\"\", type=str, help=\"Directory where the features (npy files) are saved\",", "default=2, type=int, help=\"Observed length of the trajectory in seconds\", choices=[1,2,3,4,5])", "obs_len:{}'.format(args.mode, args.filter, args.obs_len)) print ('[Analysis] data shape:{}'.format(data.shape)) print ('[Analysis] 
stats:')", "to process the data noise. (ekf/none/ekf-savgol/savgol\", choices=['ekf', 'none', 'ekf-savgol', 'savgol'])", "if __name__== '__main__': #_filters = ['none', 'ekf', 'savgol', 'ekf-savgol'] #_modes", "parser.parse_args() def stats(traj:np.ndarray) -> NoReturn: #central tendency : mean #dispersion", "_max = round(np.max(t),2) _q25 = round(np.quantile(t, 0.25),2) _q50 = round(np.quantile(t,", "length of the trajectory in seconds\", choices=[1,2,3,4,5]) parser.add_argument(\"--filter\", default='ekf', type=str,", "as np from typing import Any, Dict, List, Tuple, NoReturn", "parser.add_argument( \"--data_dir\", default=\"\", type=str, help=\"Directory where the features (npy files)", "filter:{} | obs_len:{}'.format(args.mode, args.filter, args.obs_len)) print ('[Analysis] data shape:{}'.format(data.shape)) print", "min:{} | max:{} | q25:{} | q50:{} | q75:{}'.format(_mean, _std,", ": mean #dispersion : std #bounds : min max #quantile", "std_jy] data = np.load(os.path.join(args.data_dir,data_file)) print ('[Analysis] mode:{} | filter:{} |", "round(np.quantile(t, 0.25),2) _q50 = round(np.quantile(t, 0.5),2) _q75 = round(np.quantile(t, 0.75),2)", "List, Tuple, NoReturn import argparse import os def parse_arguments() ->", ": min max #quantile : 0.25, 0.5, 0.75 labels =", "data noise. 
(ekf/none/ekf-savgol/savgol\", choices=['ekf', 'none', 'ekf-savgol', 'savgol']) return parser.parse_args() def", "round(np.quantile(t, 0.5),2) _q75 = round(np.quantile(t, 0.75),2) print (f'Feature: {l}') print", "args.filter) assert os.path.exists(os.path.join(args.data_dir, data_file)),\\ f'[Analysis][main][ERROR] data_file not found!({data_file})' print ('[Analysis]", "round(np.std(t),2) _min = round(np.min(t),2) _max = round(np.max(t),2) _q25 = round(np.quantile(t,", "_min = round(np.min(t),2) _max = round(np.max(t),2) _q25 = round(np.quantile(t, 0.25),2)", "[2,5] #seg = _obs_len[0] #mode = _modes[3] #filter_name = _filters[0]", "not found!({data_file})' print ('[Analysis] loading dataset....') # (m, 4) #", "data_file = 'features_{}_{}s_{}.npy'.format(args.mode, args.obs_len, args.filter) assert os.path.exists(os.path.join(args.data_dir, data_file)),\\ f'[Analysis][main][ERROR] data_file", "parse_arguments() -> Any: \"\"\"Parse command line arguments.\"\"\" parser = argparse.ArgumentParser()", "type=int, help=\"Observed length of the trajectory in seconds\", choices=[1,2,3,4,5]) parser.add_argument(\"--filter\",", "_modes[3] #filter_name = _filters[0] args = parse_arguments() if args.mode ==", "parse_arguments() if args.mode == 'test': args.obs_len = 2 assert os.path.exists(args.data_dir),\\", "= ['mean_v', 'mean_acc', 'mean_deac', 'std_jy'] for i, l in zip(range(0,", "of the trajectory in seconds\", choices=[1,2,3,4,5]) parser.add_argument(\"--filter\", default='ekf', type=str, help=\"Filter", "| min:{} | max:{} | q25:{} | q50:{} | q75:{}'.format(_mean,", "for i, l in zip(range(0, traj.shape[1]), labels): t = traj[:,", "#_filters = ['none', 'ekf', 'savgol', 'ekf-savgol'] #_modes = ['train', 'val',", "max #quantile : 0.25, 0.5, 0.75 labels = ['mean_v', 'mean_acc',", "NoReturn import argparse import os def parse_arguments() -> Any: \"\"\"Parse", "the trajectory in seconds\", choices=[1,2,3,4,5]) parser.add_argument(\"--filter\", default='ekf', type=str, 
help=\"Filter to", "round(np.quantile(t, 0.75),2) print (f'Feature: {l}') print ('\\tmean:{} | std:{} |", "__name__== '__main__': #_filters = ['none', 'ekf', 'savgol', 'ekf-savgol'] #_modes =", "4) # [mean_v, mean_acc, mean_deac, std_jy] data = np.load(os.path.join(args.data_dir,data_file)) print", "Tuple, NoReturn import argparse import os def parse_arguments() -> Any:", "_obs_len[0] #mode = _modes[3] #filter_name = _filters[0] args = parse_arguments()", "== 'test': args.obs_len = 2 assert os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR] data_dir not", "choices=['ekf', 'none', 'ekf-savgol', 'savgol']) return parser.parse_args() def stats(traj:np.ndarray) -> NoReturn:", "parser.add_argument(\"--mode\", required=True, type=str, help=\"train/val/test/sample\", choices=['train', 'test', 'val','sample']) parser.add_argument(\"--obs_len\", default=2, type=int,", "command line arguments.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"--data_dir\", default=\"\", type=str,", "= [2,5] #seg = _obs_len[0] #mode = _modes[3] #filter_name =", "choices=[1,2,3,4,5]) parser.add_argument(\"--filter\", default='ekf', type=str, help=\"Filter to process the data noise.", "data = np.load(os.path.join(args.data_dir,data_file)) print ('[Analysis] mode:{} | filter:{} | obs_len:{}'.format(args.mode,", "= 2 assert os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR] data_dir not found!({args.data_dir})' data_file =", "'mean_deac', 'std_jy'] for i, l in zip(range(0, traj.shape[1]), labels): t", "(f'Feature: {l}') print ('\\tmean:{} | std:{} | min:{} | max:{}", "l in zip(range(0, traj.shape[1]), labels): t = traj[:, i] _mean", "= round(np.mean(t),2) _std = round(np.std(t),2) _min = round(np.min(t),2) _max =", "mean_deac, std_jy] data = np.load(os.path.join(args.data_dir,data_file)) print ('[Analysis] mode:{} | filter:{}", "# (m, 4) # [mean_v, mean_acc, mean_deac, std_jy] data =", "-> Any: \"\"\"Parse command line arguments.\"\"\" parser = 
argparse.ArgumentParser() parser.add_argument(", "('\\tmean:{} | std:{} | min:{} | max:{} | q25:{} |", "0.25),2) _q50 = round(np.quantile(t, 0.5),2) _q75 = round(np.quantile(t, 0.75),2) print", "f'[Analysis][main][ERROR] data_dir not found!({args.data_dir})' data_file = 'features_{}_{}s_{}.npy'.format(args.mode, args.obs_len, args.filter) assert", "'features_{}_{}s_{}.npy'.format(args.mode, args.obs_len, args.filter) assert os.path.exists(os.path.join(args.data_dir, data_file)),\\ f'[Analysis][main][ERROR] data_file not found!({data_file})'", "std:{} | min:{} | max:{} | q25:{} | q50:{} |", "args.mode == 'test': args.obs_len = 2 assert os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR] data_dir", "stats(traj:np.ndarray) -> NoReturn: #central tendency : mean #dispersion : std", "#_obs_len = [2,5] #seg = _obs_len[0] #mode = _modes[3] #filter_name", "the features (npy files) are saved\", ) parser.add_argument(\"--mode\", required=True, type=str,", "2 assert os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR] data_dir not found!({args.data_dir})' data_file = 'features_{}_{}s_{}.npy'.format(args.mode,", "data_dir not found!({args.data_dir})' data_file = 'features_{}_{}s_{}.npy'.format(args.mode, args.obs_len, args.filter) assert os.path.exists(os.path.join(args.data_dir,", "args.filter, args.obs_len)) print ('[Analysis] data shape:{}'.format(data.shape)) print ('[Analysis] stats:') stats(data)", "zip(range(0, traj.shape[1]), labels): t = traj[:, i] _mean = round(np.mean(t),2)", "0.5),2) _q75 = round(np.quantile(t, 0.75),2) print (f'Feature: {l}') print ('\\tmean:{}", "choices=['train', 'test', 'val','sample']) parser.add_argument(\"--obs_len\", default=2, type=int, help=\"Observed length of the", "i] _mean = round(np.mean(t),2) _std = round(np.std(t),2) _min = round(np.min(t),2)", "args.obs_len, args.filter) assert os.path.exists(os.path.join(args.data_dir, data_file)),\\ f'[Analysis][main][ERROR] data_file not found!({data_file})' print", "_q25, _q50, 
_q75)) if __name__== '__main__': #_filters = ['none', 'ekf',", "def parse_arguments() -> Any: \"\"\"Parse command line arguments.\"\"\" parser =", "= np.load(os.path.join(args.data_dir,data_file)) print ('[Analysis] mode:{} | filter:{} | obs_len:{}'.format(args.mode, args.filter,", "type=str, help=\"Directory where the features (npy files) are saved\", )", "np from typing import Any, Dict, List, Tuple, NoReturn import", "argparse import os def parse_arguments() -> Any: \"\"\"Parse command line", "#dispersion : std #bounds : min max #quantile : 0.25,", "(npy files) are saved\", ) parser.add_argument(\"--mode\", required=True, type=str, help=\"train/val/test/sample\", choices=['train',", "_q50, _q75)) if __name__== '__main__': #_filters = ['none', 'ekf', 'savgol',", "os def parse_arguments() -> Any: \"\"\"Parse command line arguments.\"\"\" parser", "= argparse.ArgumentParser() parser.add_argument( \"--data_dir\", default=\"\", type=str, help=\"Directory where the features", "features (npy files) are saved\", ) parser.add_argument(\"--mode\", required=True, type=str, help=\"train/val/test/sample\",", "import argparse import os def parse_arguments() -> Any: \"\"\"Parse command", "mean_acc, mean_deac, std_jy] data = np.load(os.path.join(args.data_dir,data_file)) print ('[Analysis] mode:{} |", "'ekf-savgol'] #_modes = ['train', 'val', 'test', 'sample'] #_obs_len = [2,5]", "= traj[:, i] _mean = round(np.mean(t),2) _std = round(np.std(t),2) _min", "'val','sample']) parser.add_argument(\"--obs_len\", default=2, type=int, help=\"Observed length of the trajectory in", "f'[Analysis][main][ERROR] data_file not found!({data_file})' print ('[Analysis] loading dataset....') # (m,", "#quantile : 0.25, 0.5, 0.75 labels = ['mean_v', 'mean_acc', 'mean_deac',", "print ('[Analysis] loading dataset....') # (m, 4) # [mean_v, mean_acc,", "'sample'] #_obs_len = [2,5] #seg = _obs_len[0] #mode = _modes[3]", "\"\"\"Parse command line arguments.\"\"\" parser = argparse.ArgumentParser() 
parser.add_argument( \"--data_dir\", default=\"\",", "typing import Any, Dict, List, Tuple, NoReturn import argparse import", "traj[:, i] _mean = round(np.mean(t),2) _std = round(np.std(t),2) _min =", "round(np.min(t),2) _max = round(np.max(t),2) _q25 = round(np.quantile(t, 0.25),2) _q50 =", "0.75 labels = ['mean_v', 'mean_acc', 'mean_deac', 'std_jy'] for i, l", "if args.mode == 'test': args.obs_len = 2 assert os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR]", "np.load(os.path.join(args.data_dir,data_file)) print ('[Analysis] mode:{} | filter:{} | obs_len:{}'.format(args.mode, args.filter, args.obs_len))", "tendency : mean #dispersion : std #bounds : min max", "= ['train', 'val', 'test', 'sample'] #_obs_len = [2,5] #seg =", "| q50:{} | q75:{}'.format(_mean, _std, _min, _max, _q25, _q50, _q75))", "mean #dispersion : std #bounds : min max #quantile :", "'test', 'val','sample']) parser.add_argument(\"--obs_len\", default=2, type=int, help=\"Observed length of the trajectory", "'savgol']) return parser.parse_args() def stats(traj:np.ndarray) -> NoReturn: #central tendency :", "_min, _max, _q25, _q50, _q75)) if __name__== '__main__': #_filters =", "labels): t = traj[:, i] _mean = round(np.mean(t),2) _std =", "_mean = round(np.mean(t),2) _std = round(np.std(t),2) _min = round(np.min(t),2) _max", "#bounds : min max #quantile : 0.25, 0.5, 0.75 labels", "\"--data_dir\", default=\"\", type=str, help=\"Directory where the features (npy files) are", "'savgol', 'ekf-savgol'] #_modes = ['train', 'val', 'test', 'sample'] #_obs_len =", "import numpy as np from typing import Any, Dict, List,", "= _obs_len[0] #mode = _modes[3] #filter_name = _filters[0] args =", ") parser.add_argument(\"--mode\", required=True, type=str, help=\"train/val/test/sample\", choices=['train', 'test', 'val','sample']) parser.add_argument(\"--obs_len\", default=2,", "('[Analysis] loading dataset....') # (m, 4) # [mean_v, mean_acc, mean_deac,", "= 'features_{}_{}s_{}.npy'.format(args.mode, 
args.obs_len, args.filter) assert os.path.exists(os.path.join(args.data_dir, data_file)),\\ f'[Analysis][main][ERROR] data_file not", "assert os.path.exists(os.path.join(args.data_dir, data_file)),\\ f'[Analysis][main][ERROR] data_file not found!({data_file})' print ('[Analysis] loading", "trajectory in seconds\", choices=[1,2,3,4,5]) parser.add_argument(\"--filter\", default='ekf', type=str, help=\"Filter to process", "| std:{} | min:{} | max:{} | q25:{} | q50:{}", "numpy as np from typing import Any, Dict, List, Tuple,", "loading dataset....') # (m, 4) # [mean_v, mean_acc, mean_deac, std_jy]", "parser.add_argument(\"--obs_len\", default=2, type=int, help=\"Observed length of the trajectory in seconds\",", "print ('\\tmean:{} | std:{} | min:{} | max:{} | q25:{}", "| q25:{} | q50:{} | q75:{}'.format(_mean, _std, _min, _max, _q25,", "= parse_arguments() if args.mode == 'test': args.obs_len = 2 assert", "data_file)),\\ f'[Analysis][main][ERROR] data_file not found!({data_file})' print ('[Analysis] loading dataset....') #", "= round(np.std(t),2) _min = round(np.min(t),2) _max = round(np.max(t),2) _q25 =", "= round(np.quantile(t, 0.25),2) _q50 = round(np.quantile(t, 0.5),2) _q75 = round(np.quantile(t,", "q75:{}'.format(_mean, _std, _min, _max, _q25, _q50, _q75)) if __name__== '__main__':", "traj.shape[1]), labels): t = traj[:, i] _mean = round(np.mean(t),2) _std", "'std_jy'] for i, l in zip(range(0, traj.shape[1]), labels): t =", "in seconds\", choices=[1,2,3,4,5]) parser.add_argument(\"--filter\", default='ekf', type=str, help=\"Filter to process the", "#_modes = ['train', 'val', 'test', 'sample'] #_obs_len = [2,5] #seg", "seconds\", choices=[1,2,3,4,5]) parser.add_argument(\"--filter\", default='ekf', type=str, help=\"Filter to process the data", "| obs_len:{}'.format(args.mode, args.filter, args.obs_len)) print ('[Analysis] data shape:{}'.format(data.shape)) print ('[Analysis]", "| q75:{}'.format(_mean, _std, _min, _max, _q25, _q50, _q75)) if __name__==", "process 
the data noise. (ekf/none/ekf-savgol/savgol\", choices=['ekf', 'none', 'ekf-savgol', 'savgol']) return", "_q75)) if __name__== '__main__': #_filters = ['none', 'ekf', 'savgol', 'ekf-savgol']", "'test': args.obs_len = 2 assert os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR] data_dir not found!({args.data_dir})'", "args.obs_len = 2 assert os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR] data_dir not found!({args.data_dir})' data_file", "{l}') print ('\\tmean:{} | std:{} | min:{} | max:{} |", "labels = ['mean_v', 'mean_acc', 'mean_deac', 'std_jy'] for i, l in", "['train', 'val', 'test', 'sample'] #_obs_len = [2,5] #seg = _obs_len[0]", "| max:{} | q25:{} | q50:{} | q75:{}'.format(_mean, _std, _min,", "0.25, 0.5, 0.75 labels = ['mean_v', 'mean_acc', 'mean_deac', 'std_jy'] for", "q25:{} | q50:{} | q75:{}'.format(_mean, _std, _min, _max, _q25, _q50,", "argparse.ArgumentParser() parser.add_argument( \"--data_dir\", default=\"\", type=str, help=\"Directory where the features (npy", "help=\"Filter to process the data noise. 
(ekf/none/ekf-savgol/savgol\", choices=['ekf', 'none', 'ekf-savgol',", "not found!({args.data_dir})' data_file = 'features_{}_{}s_{}.npy'.format(args.mode, args.obs_len, args.filter) assert os.path.exists(os.path.join(args.data_dir, data_file)),\\", "Any, Dict, List, Tuple, NoReturn import argparse import os def", "os.path.exists(args.data_dir),\\ f'[Analysis][main][ERROR] data_dir not found!({args.data_dir})' data_file = 'features_{}_{}s_{}.npy'.format(args.mode, args.obs_len, args.filter)", "NoReturn: #central tendency : mean #dispersion : std #bounds :", "args = parse_arguments() if args.mode == 'test': args.obs_len = 2", "are saved\", ) parser.add_argument(\"--mode\", required=True, type=str, help=\"train/val/test/sample\", choices=['train', 'test', 'val','sample'])", "Dict, List, Tuple, NoReturn import argparse import os def parse_arguments()", "parser.add_argument(\"--filter\", default='ekf', type=str, help=\"Filter to process the data noise. (ekf/none/ekf-savgol/savgol\",", "arguments.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"--data_dir\", default=\"\", type=str, help=\"Directory where", "dataset....') # (m, 4) # [mean_v, mean_acc, mean_deac, std_jy] data", "def stats(traj:np.ndarray) -> NoReturn: #central tendency : mean #dispersion :", "noise. 
(ekf/none/ekf-savgol/savgol\", choices=['ekf', 'none', 'ekf-savgol', 'savgol']) return parser.parse_args() def stats(traj:np.ndarray)", "max:{} | q25:{} | q50:{} | q75:{}'.format(_mean, _std, _min, _max,", "print (f'Feature: {l}') print ('\\tmean:{} | std:{} | min:{} |", "('[Analysis] mode:{} | filter:{} | obs_len:{}'.format(args.mode, args.filter, args.obs_len)) print ('[Analysis]", "std #bounds : min max #quantile : 0.25, 0.5, 0.75", "[mean_v, mean_acc, mean_deac, std_jy] data = np.load(os.path.join(args.data_dir,data_file)) print ('[Analysis] mode:{}", "['mean_v', 'mean_acc', 'mean_deac', 'std_jy'] for i, l in zip(range(0, traj.shape[1]),", "from typing import Any, Dict, List, Tuple, NoReturn import argparse", "where the features (npy files) are saved\", ) parser.add_argument(\"--mode\", required=True,", "round(np.mean(t),2) _std = round(np.std(t),2) _min = round(np.min(t),2) _max = round(np.max(t),2)", "| filter:{} | obs_len:{}'.format(args.mode, args.filter, args.obs_len)) print ('[Analysis] data shape:{}'.format(data.shape))", "#mode = _modes[3] #filter_name = _filters[0] args = parse_arguments() if", ": 0.25, 0.5, 0.75 labels = ['mean_v', 'mean_acc', 'mean_deac', 'std_jy']", "_q50 = round(np.quantile(t, 0.5),2) _q75 = round(np.quantile(t, 0.75),2) print (f'Feature:", "'ekf', 'savgol', 'ekf-savgol'] #_modes = ['train', 'val', 'test', 'sample'] #_obs_len", "t = traj[:, i] _mean = round(np.mean(t),2) _std = round(np.std(t),2)", ": std #bounds : min max #quantile : 0.25, 0.5,", "0.5, 0.75 labels = ['mean_v', 'mean_acc', 'mean_deac', 'std_jy'] for i,", "['none', 'ekf', 'savgol', 'ekf-savgol'] #_modes = ['train', 'val', 'test', 'sample']", "help=\"Directory where the features (npy files) are saved\", ) parser.add_argument(\"--mode\",", "print ('[Analysis] mode:{} | filter:{} | obs_len:{}'.format(args.mode, args.filter, args.obs_len)) print", "import os def parse_arguments() -> Any: \"\"\"Parse command line arguments.\"\"\"", "# [mean_v, mean_acc, mean_deac, 
std_jy] data = np.load(os.path.join(args.data_dir,data_file)) print ('[Analysis]", "return parser.parse_args() def stats(traj:np.ndarray) -> NoReturn: #central tendency : mean", "0.75),2) print (f'Feature: {l}') print ('\\tmean:{} | std:{} | min:{}" ]
[ "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "m_get_notifier = p_get_notifier.start() self.addCleanup(p_get_notifier.stop) self.m_notifier = mock.Mock(spec=om.Notifier) def fake_get_notifier(publisher_id): self.m_notifier.publisher_id", "= publisher_id return self.m_notifier m_get_notifier.side_effect = fake_get_notifier def test_service_failed(self): service", "'last_seen_up': '2016-09-22T08:32:06Z', 'name': 'watcher-service', 'sevice_host': 'controller', 'status_update': { 'watcher_object.data': {", "limitations under the License. import datetime import freezegun import mock", "'1.0' } }, 'watcher_object.name': 'ServiceUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' },", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "watcher.objects import service as w_service from watcher.tests.db import base from", "state = w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(), service, state, host='node0') notification = self.m_notifier.warning.call_args[1]", "publisher_id return self.m_notifier m_get_notifier.side_effect = fake_get_notifier def test_service_failed(self): service =", "under the License. 
import datetime import freezegun import mock import", "{ 'last_seen_up': '2016-09-22T08:32:06Z', 'name': 'watcher-service', 'sevice_host': 'controller', 'status_update': { 'watcher_object.data':", "w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(), service, state, host='node0') notification = self.m_notifier.warning.call_args[1] payload =", "encoding: utf-8 -*- # Copyright (c) 2017 Servionica # #", "distributed under the License is distributed on an \"AS IS\"", "= fake_get_notifier def test_service_failed(self): service = utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow()) state =", "{ 'old_state': 'ACTIVE', 'state': 'FAILED' }, 'watcher_object.name': 'ServiceStatusUpdatePayload', 'watcher_object.namespace': 'watcher',", "notification['payload'] self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id) self.assertDictEqual({ 'watcher_object.data': { 'last_seen_up': '2016-09-22T08:32:06Z', 'name': 'watcher-service',", "'FAILED' }, 'watcher_object.name': 'ServiceStatusUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' } },", "or # implied. 
# See the License for the specific", "def setUp(self): super(TestActionPlanNotification, self).setUp() p_get_notifier = mock.patch.object(rpc, 'get_notifier') m_get_notifier =", "'watcher_object.name': 'ServiceStatusUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' } }, 'watcher_object.name': 'ServiceUpdatePayload',", "@freezegun.freeze_time('2016-10-18T09:52:05.219414') class TestActionPlanNotification(base.DbTestCase): def setUp(self): super(TestActionPlanNotification, self).setUp() p_get_notifier = mock.patch.object(rpc,", "the specific language governing permissions and # limitations under the", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "as w_service from watcher.tests.db import base from watcher.tests.objects import utils", "= mock.patch.object(rpc, 'get_notifier') m_get_notifier = p_get_notifier.start() self.addCleanup(p_get_notifier.stop) self.m_notifier = mock.Mock(spec=om.Notifier)", "= p_get_notifier.start() self.addCleanup(p_get_notifier.stop) self.m_notifier = mock.Mock(spec=om.Notifier) def fake_get_notifier(publisher_id): self.m_notifier.publisher_id =", "applicable law or agreed to in writing, software # distributed", "self.assertDictEqual({ 'watcher_object.data': { 'last_seen_up': '2016-09-22T08:32:06Z', 'name': 'watcher-service', 'sevice_host': 'controller', 'status_update':", "'ACTIVE', 'state': 'FAILED' }, 'watcher_object.name': 'ServiceStatusUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0'", "except in compliance with the License. 
# You may obtain", "utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow()) state = w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(), service, state, host='node0') notification", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self.m_notifier.publisher_id = publisher_id return self.m_notifier m_get_notifier.side_effect = fake_get_notifier def test_service_failed(self):", "test_service_failed(self): service = utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow()) state = w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(), service,", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "self.m_notifier.publisher_id) self.assertDictEqual({ 'watcher_object.data': { 'last_seen_up': '2016-09-22T08:32:06Z', 'name': 'watcher-service', 'sevice_host': 'controller',", "not use this file except in compliance with the License.", "express or # implied. # See the License for the", "'controller', 'status_update': { 'watcher_object.data': { 'old_state': 'ACTIVE', 'state': 'FAILED' },", "{ 'watcher_object.data': { 'old_state': 'ACTIVE', 'state': 'FAILED' }, 'watcher_object.name': 'ServiceStatusUpdatePayload',", "writing, software # distributed under the License is distributed on", "in writing, software # distributed under the License is distributed", "-*- # Copyright (c) 2017 Servionica # # Licensed under", "you may not use this file except in compliance with", "-*- encoding: utf-8 -*- # Copyright (c) 2017 Servionica #", "self).setUp() p_get_notifier = mock.patch.object(rpc, 'get_notifier') m_get_notifier = p_get_notifier.start() self.addCleanup(p_get_notifier.stop) self.m_notifier", "ANY KIND, either express or # implied. # See the", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "language governing permissions and # limitations under the License. 
import", "watcher.common import rpc from watcher import notifications from watcher.objects import", "2017 Servionica # # Licensed under the Apache License, Version", "use this file except in compliance with the License. #", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "def test_service_failed(self): service = utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow()) state = w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(),", "import base from watcher.tests.objects import utils @freezegun.freeze_time('2016-10-18T09:52:05.219414') class TestActionPlanNotification(base.DbTestCase): def", "super(TestActionPlanNotification, self).setUp() p_get_notifier = mock.patch.object(rpc, 'get_notifier') m_get_notifier = p_get_notifier.start() self.addCleanup(p_get_notifier.stop)", "= self.m_notifier.warning.call_args[1] payload = notification['payload'] self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id) self.assertDictEqual({ 'watcher_object.data': {", "Servionica # # Licensed under the Apache License, Version 2.0", "(c) 2017 Servionica # # Licensed under the Apache License,", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "fake_get_notifier def test_service_failed(self): service = utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow()) state = w_service.ServiceStatus.FAILED", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "OF ANY KIND, either express or # implied. # See", "License. 
# You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "base from watcher.tests.objects import utils @freezegun.freeze_time('2016-10-18T09:52:05.219414') class TestActionPlanNotification(base.DbTestCase): def setUp(self):", "return self.m_notifier m_get_notifier.side_effect = fake_get_notifier def test_service_failed(self): service = utils.get_test_service(mock.Mock(),", "# You may obtain a copy of the License at", "'watcher_object.version': '1.0' } }, 'watcher_object.name': 'ServiceUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0'", "specific language governing permissions and # limitations under the License.", "}, 'watcher_object.name': 'ServiceUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' }, payload )", "under the License is distributed on an \"AS IS\" BASIS,", "from watcher.tests.objects import utils @freezegun.freeze_time('2016-10-18T09:52:05.219414') class TestActionPlanNotification(base.DbTestCase): def setUp(self): super(TestActionPlanNotification,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "import notifications from watcher.objects import service as w_service from watcher.tests.db", "service = utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow()) state = w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(), service, state,", "License for the specific language governing permissions and # limitations", "notification = self.m_notifier.warning.call_args[1] payload = notification['payload'] self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id) self.assertDictEqual({ 'watcher_object.data':", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "either express or # implied. 
# See the License for", "service, state, host='node0') notification = self.m_notifier.warning.call_args[1] payload = notification['payload'] self.assertEqual(\"infra-optim:node0\",", "watcher import notifications from watcher.objects import service as w_service from", "'ServiceStatusUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' } }, 'watcher_object.name': 'ServiceUpdatePayload', 'watcher_object.namespace':", "'name': 'watcher-service', 'sevice_host': 'controller', 'status_update': { 'watcher_object.data': { 'old_state': 'ACTIVE',", "om from watcher.common import rpc from watcher import notifications from", "class TestActionPlanNotification(base.DbTestCase): def setUp(self): super(TestActionPlanNotification, self).setUp() p_get_notifier = mock.patch.object(rpc, 'get_notifier')", "p_get_notifier.start() self.addCleanup(p_get_notifier.stop) self.m_notifier = mock.Mock(spec=om.Notifier) def fake_get_notifier(publisher_id): self.m_notifier.publisher_id = publisher_id", "the License for the specific language governing permissions and #", "from watcher.objects import service as w_service from watcher.tests.db import base", "(the \"License\"); # you may not use this file except", "import oslo_messaging as om from watcher.common import rpc from watcher", "mock.patch.object(rpc, 'get_notifier') m_get_notifier = p_get_notifier.start() self.addCleanup(p_get_notifier.stop) self.m_notifier = mock.Mock(spec=om.Notifier) def", "Apache License, Version 2.0 (the \"License\"); # you may not", "notifications.service.send_service_update(mock.MagicMock(), service, state, host='node0') notification = self.m_notifier.warning.call_args[1] payload = notification['payload']", "# you may not use this file except in compliance", "governing permissions and # limitations under the License. 
import datetime", "p_get_notifier = mock.patch.object(rpc, 'get_notifier') m_get_notifier = p_get_notifier.start() self.addCleanup(p_get_notifier.stop) self.m_notifier =", "rpc from watcher import notifications from watcher.objects import service as", "freezegun import mock import oslo_messaging as om from watcher.common import", "datetime import freezegun import mock import oslo_messaging as om from", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "watcher.tests.objects import utils @freezegun.freeze_time('2016-10-18T09:52:05.219414') class TestActionPlanNotification(base.DbTestCase): def setUp(self): super(TestActionPlanNotification, self).setUp()", "the License is distributed on an \"AS IS\" BASIS, #", "in compliance with the License. # You may obtain a", "'old_state': 'ACTIVE', 'state': 'FAILED' }, 'watcher_object.name': 'ServiceStatusUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version':", "} }, 'watcher_object.name': 'ServiceUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' }, payload", "software # distributed under the License is distributed on an", "service as w_service from watcher.tests.db import base from watcher.tests.objects import", "self.m_notifier m_get_notifier.side_effect = fake_get_notifier def test_service_failed(self): service = utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow())", "mock.Mock(spec=om.Notifier) def fake_get_notifier(publisher_id): self.m_notifier.publisher_id = publisher_id return self.m_notifier m_get_notifier.side_effect =", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or #", "import service as w_service from watcher.tests.db import base from watcher.tests.objects", "utils @freezegun.freeze_time('2016-10-18T09:52:05.219414') class TestActionPlanNotification(base.DbTestCase): def setUp(self): super(TestActionPlanNotification, self).setUp() p_get_notifier =", "# # Unless required by applicable 
law or agreed to", "# implied. # See the License for the specific language", "'watcher_object.data': { 'last_seen_up': '2016-09-22T08:32:06Z', 'name': 'watcher-service', 'sevice_host': 'controller', 'status_update': {", "import mock import oslo_messaging as om from watcher.common import rpc", "License. import datetime import freezegun import mock import oslo_messaging as", "Copyright (c) 2017 Servionica # # Licensed under the Apache", "utf-8 -*- # Copyright (c) 2017 Servionica # # Licensed", "TestActionPlanNotification(base.DbTestCase): def setUp(self): super(TestActionPlanNotification, self).setUp() p_get_notifier = mock.patch.object(rpc, 'get_notifier') m_get_notifier", "'watcher', 'watcher_object.version': '1.0' } }, 'watcher_object.name': 'ServiceUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version':", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "'2016-09-22T08:32:06Z', 'name': 'watcher-service', 'sevice_host': 'controller', 'status_update': { 'watcher_object.data': { 'old_state':", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "= notification['payload'] self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id) self.assertDictEqual({ 'watcher_object.data': { 'last_seen_up': '2016-09-22T08:32:06Z', 'name':", "payload = notification['payload'] self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id) self.assertDictEqual({ 'watcher_object.data': { 'last_seen_up': '2016-09-22T08:32:06Z',", "Version 2.0 (the \"License\"); # you may not use this", "# -*- encoding: utf-8 -*- # Copyright (c) 2017 Servionica", "mock import oslo_messaging as om from watcher.common import rpc from", "law or agreed to in writing, software # distributed under", "'watcher-service', 'sevice_host': 'controller', 'status_update': { 'watcher_object.data': { 'old_state': 'ACTIVE', 'state':", "'watcher_object.data': { 'old_state': 'ACTIVE', 'state': 'FAILED' }, 
'watcher_object.name': 'ServiceStatusUpdatePayload', 'watcher_object.namespace':", "created_at=datetime.datetime.utcnow()) state = w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(), service, state, host='node0') notification =", "'state': 'FAILED' }, 'watcher_object.name': 'ServiceStatusUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' }", "permissions and # limitations under the License. import datetime import", "# Copyright (c) 2017 Servionica # # Licensed under the", "oslo_messaging as om from watcher.common import rpc from watcher import", "from watcher.tests.db import base from watcher.tests.objects import utils @freezegun.freeze_time('2016-10-18T09:52:05.219414') class", "KIND, either express or # implied. # See the License", "implied. # See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "= utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow()) state = w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(), service, state, host='node0')", "self.m_notifier.warning.call_args[1] payload = notification['payload'] self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id) self.assertDictEqual({ 'watcher_object.data': { 'last_seen_up':", "\"License\"); # you may not use this file except in", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "def fake_get_notifier(publisher_id): self.m_notifier.publisher_id = publisher_id return self.m_notifier m_get_notifier.side_effect = fake_get_notifier", "setUp(self): super(TestActionPlanNotification, self).setUp() p_get_notifier = mock.patch.object(rpc, 'get_notifier') m_get_notifier = p_get_notifier.start()", "CONDITIONS OF ANY KIND, either express or # implied. 
#", "self.m_notifier = mock.Mock(spec=om.Notifier) def fake_get_notifier(publisher_id): self.m_notifier.publisher_id = publisher_id return self.m_notifier", "import datetime import freezegun import mock import oslo_messaging as om", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "See the License for the specific language governing permissions and", "m_get_notifier.side_effect = fake_get_notifier def test_service_failed(self): service = utils.get_test_service(mock.Mock(), created_at=datetime.datetime.utcnow()) state", "w_service from watcher.tests.db import base from watcher.tests.objects import utils @freezegun.freeze_time('2016-10-18T09:52:05.219414')", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "to in writing, software # distributed under the License is", "'get_notifier') m_get_notifier = p_get_notifier.start() self.addCleanup(p_get_notifier.stop) self.m_notifier = mock.Mock(spec=om.Notifier) def fake_get_notifier(publisher_id):", "from watcher import notifications from watcher.objects import service as w_service", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "= w_service.ServiceStatus.FAILED notifications.service.send_service_update(mock.MagicMock(), service, state, host='node0') notification = self.m_notifier.warning.call_args[1] payload", "OR CONDITIONS OF ANY KIND, either express or # implied.", "You may obtain a copy of the License at #", "notifications from watcher.objects import service as w_service from watcher.tests.db import", "state, host='node0') notification = 
self.m_notifier.warning.call_args[1] payload = notification['payload'] self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id)", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id) self.assertDictEqual({ 'watcher_object.data': { 'last_seen_up': '2016-09-22T08:32:06Z', 'name': 'watcher-service', 'sevice_host':", "required by applicable law or agreed to in writing, software", "the License. import datetime import freezegun import mock import oslo_messaging", "from watcher.common import rpc from watcher import notifications from watcher.objects", "watcher.tests.db import base from watcher.tests.objects import utils @freezegun.freeze_time('2016-10-18T09:52:05.219414') class TestActionPlanNotification(base.DbTestCase):", "as om from watcher.common import rpc from watcher import notifications", "host='node0') notification = self.m_notifier.warning.call_args[1] payload = notification['payload'] self.assertEqual(\"infra-optim:node0\", self.m_notifier.publisher_id) self.assertDictEqual({", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "'status_update': { 'watcher_object.data': { 'old_state': 'ACTIVE', 'state': 'FAILED' }, 'watcher_object.name':", "import freezegun import mock import oslo_messaging as om from watcher.common", "with the License. # You may obtain a copy of", "import rpc from watcher import notifications from watcher.objects import service", "this file except in compliance with the License. # You", "}, 'watcher_object.name': 'ServiceStatusUpdatePayload', 'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' } }, 'watcher_object.name':", "and # limitations under the License. 
import datetime import freezegun", "the Apache License, Version 2.0 (the \"License\"); # you may", "self.addCleanup(p_get_notifier.stop) self.m_notifier = mock.Mock(spec=om.Notifier) def fake_get_notifier(publisher_id): self.m_notifier.publisher_id = publisher_id return", "= mock.Mock(spec=om.Notifier) def fake_get_notifier(publisher_id): self.m_notifier.publisher_id = publisher_id return self.m_notifier m_get_notifier.side_effect", "fake_get_notifier(publisher_id): self.m_notifier.publisher_id = publisher_id return self.m_notifier m_get_notifier.side_effect = fake_get_notifier def", "'sevice_host': 'controller', 'status_update': { 'watcher_object.data': { 'old_state': 'ACTIVE', 'state': 'FAILED'", "'watcher_object.namespace': 'watcher', 'watcher_object.version': '1.0' } }, 'watcher_object.name': 'ServiceUpdatePayload', 'watcher_object.namespace': 'watcher',", "import utils @freezegun.freeze_time('2016-10-18T09:52:05.219414') class TestActionPlanNotification(base.DbTestCase): def setUp(self): super(TestActionPlanNotification, self).setUp() p_get_notifier", "# limitations under the License. import datetime import freezegun import" ]
[ "the same time (ie, you must sell the stock before", "for which the ith element is the price of a", "= prices[0] tend = 0 # 0:down, 1:up for i", "prices[i] < prices[i - 1]: # go down if tend", "go down if tend == 1: max_val = prices[i -", "You may complete as many transactions as you like (ie,", "# go down if tend == 1: max_val = prices[i", "if prices[i] > prices[i - 1]: # go up if", "== '__main__': prices = [8,9,2,5] s = Solution() print s.maxProfit(prices)", "profit_list.append(max_val - min_val) tend = 0 pass if prices[i] >", "you buy again). class Solution(object): def maxProfit(self, prices): \"\"\" :type", "= prices[0] max_val = prices[0] tend = 0 # 0:down,", "maxProfit(self, prices): \"\"\" :type prices: List[int] :rtype: int \"\"\" if", "= 0 pass if prices[i] > prices[i - 1]: #", "at the same time (ie, you must sell the stock", "if prices == []: return 0 profit_list = [] min_val", "# https://leetcode.com/problems/best-time-to-buy-and-sell-stock-ii/ # # Say you have an array for", "you like (ie, buy one and sell one share of", "prices[i - 1]: # go up if tend == 0:", "prices == []: return 0 profit_list = [] min_val =", "of the stock multiple times). # However, you may not", "- min_val) tend = 0 pass if prices[i] > prices[i", "time (ie, you must sell the stock before you buy", "if tend == 1: max_val = prices[i - 1] profit_list.append(max_val", "if tend == 0: min_val = prices[i - 1] tend", "and sell one share of the stock multiple times). #", "before you buy again). class Solution(object): def maxProfit(self, prices): \"\"\"", "which the ith element is the price of a given", "1: profit_list.append(prices[i] - min_val) return sum(profit_list) if __name__ == '__main__':", "ith element is the price of a given stock on", "to find the maximum profit. 
# You may complete as", "profit_list.append(prices[i] - min_val) return sum(profit_list) if __name__ == '__main__': prices", "return sum(profit_list) if __name__ == '__main__': prices = [8,9,2,5] s", "min_val) tend = 0 pass if prices[i] > prices[i -", "(ie, you must sell the stock before you buy again).", "prices[i] > prices[i - 1]: # go up if tend", "stock multiple times). # However, you may not engage in", "\"\"\" if prices == []: return 0 profit_list = []", "engage in multiple transactions at the same time (ie, you", "0 # 0:down, 1:up for i in range(1, len(prices)): if", "an algorithm to find the maximum profit. # You may", "prices[0] max_val = prices[0] tend = 0 # 0:down, 1:up", "- min_val) return sum(profit_list) if __name__ == '__main__': prices =", "len(prices)): if prices[i] < prices[i - 1]: # go down", "tend = 1 pass if tend == 1: profit_list.append(prices[i] -", "profit_list = [] min_val = prices[0] max_val = prices[0] tend", "pass if tend == 1: profit_list.append(prices[i] - min_val) return sum(profit_list)", "Solution(object): def maxProfit(self, prices): \"\"\" :type prices: List[int] :rtype: int", "prices[i - 1]: # go down if tend == 1:", "of a given stock on day i. # # Design", "algorithm to find the maximum profit. # You may complete", "tend == 1: max_val = prices[i - 1] profit_list.append(max_val -", "the stock before you buy again). class Solution(object): def maxProfit(self,", "# 0:down, 1:up for i in range(1, len(prices)): if prices[i]", "(ie, buy one and sell one share of the stock", "tend == 1: profit_list.append(prices[i] - min_val) return sum(profit_list) if __name__", "multiple transactions at the same time (ie, you must sell", "- 1] tend = 1 pass if tend == 1:", "- 1] profit_list.append(max_val - min_val) tend = 0 pass if", "i in range(1, len(prices)): if prices[i] < prices[i - 1]:", "share of the stock multiple times). 
# However, you may", "return 0 profit_list = [] min_val = prices[0] max_val =", "go up if tend == 0: min_val = prices[i -", "day i. # # Design an algorithm to find the", "stock on day i. # # Design an algorithm to", "multiple times). # However, you may not engage in multiple", "range(1, len(prices)): if prices[i] < prices[i - 1]: # go", "= prices[i - 1] profit_list.append(max_val - min_val) tend = 0", "def maxProfit(self, prices): \"\"\" :type prices: List[int] :rtype: int \"\"\"", "transactions at the same time (ie, you must sell the", ":type prices: List[int] :rtype: int \"\"\" if prices == []:", "List[int] :rtype: int \"\"\" if prices == []: return 0", "tend == 0: min_val = prices[i - 1] tend =", "min_val) return sum(profit_list) if __name__ == '__main__': prices = [8,9,2,5]", "< prices[i - 1]: # go down if tend ==", "one share of the stock multiple times). # However, you", "https://leetcode.com/problems/best-time-to-buy-and-sell-stock-ii/ # # Say you have an array for which", "prices: List[int] :rtype: int \"\"\" if prices == []: return", "== []: return 0 profit_list = [] min_val = prices[0]", "min_val = prices[0] max_val = prices[0] tend = 0 #", "if tend == 1: profit_list.append(prices[i] - min_val) return sum(profit_list) if", "price of a given stock on day i. # #", "tend = 0 # 0:down, 1:up for i in range(1,", "= prices[i - 1] tend = 1 pass if tend", "sum(profit_list) if __name__ == '__main__': prices = [8,9,2,5] s =", "as many transactions as you like (ie, buy one and", "you have an array for which the ith element is", "you may not engage in multiple transactions at the same", "stock before you buy again). class Solution(object): def maxProfit(self, prices):", "element is the price of a given stock on day", "in range(1, len(prices)): if prices[i] < prices[i - 1]: #", "prices[0] tend = 0 # 0:down, 1:up for i in", "max_val = prices[0] tend = 0 # 0:down, 1:up for", "buy again). 
class Solution(object): def maxProfit(self, prices): \"\"\" :type prices:", "the maximum profit. # You may complete as many transactions", "sell the stock before you buy again). class Solution(object): def", "1]: # go up if tend == 0: min_val =", "[]: return 0 profit_list = [] min_val = prices[0] max_val", "# Design an algorithm to find the maximum profit. #", "However, you may not engage in multiple transactions at the", "maximum profit. # You may complete as many transactions as", "same time (ie, you must sell the stock before you", "1:up for i in range(1, len(prices)): if prices[i] < prices[i", "\"\"\" :type prices: List[int] :rtype: int \"\"\" if prices ==", "int \"\"\" if prices == []: return 0 profit_list =", "== 1: profit_list.append(prices[i] - min_val) return sum(profit_list) if __name__ ==", "> prices[i - 1]: # go up if tend ==", "- 1]: # go up if tend == 0: min_val", "# You may complete as many transactions as you like", "[] min_val = prices[0] max_val = prices[0] tend = 0", "0:down, 1:up for i in range(1, len(prices)): if prices[i] <", "may not engage in multiple transactions at the same time", "have an array for which the ith element is the", "= [] min_val = prices[0] max_val = prices[0] tend =", "Say you have an array for which the ith element", "one and sell one share of the stock multiple times).", "if __name__ == '__main__': prices = [8,9,2,5] s = Solution()", "buy one and sell one share of the stock multiple", "if prices[i] < prices[i - 1]: # go down if", "tend = 0 pass if prices[i] > prices[i - 1]:", "__name__ == '__main__': prices = [8,9,2,5] s = Solution() print", "given stock on day i. 
# # Design an algorithm", "the ith element is the price of a given stock", "as you like (ie, buy one and sell one share", "# However, you may not engage in multiple transactions at", "many transactions as you like (ie, buy one and sell", "like (ie, buy one and sell one share of the", "# go up if tend == 0: min_val = prices[i", "prices): \"\"\" :type prices: List[int] :rtype: int \"\"\" if prices", "profit. # You may complete as many transactions as you", "class Solution(object): def maxProfit(self, prices): \"\"\" :type prices: List[int] :rtype:", ":rtype: int \"\"\" if prices == []: return 0 profit_list", "prices[i - 1] tend = 1 pass if tend ==", "must sell the stock before you buy again). class Solution(object):", "sell one share of the stock multiple times). # However,", "prices[i - 1] profit_list.append(max_val - min_val) tend = 0 pass", "you must sell the stock before you buy again). class", "down if tend == 1: max_val = prices[i - 1]", "min_val = prices[i - 1] tend = 1 pass if", "# Say you have an array for which the ith", "a given stock on day i. # # Design an", "array for which the ith element is the price of", "# # Say you have an array for which the", "Design an algorithm to find the maximum profit. # You", "1]: # go down if tend == 1: max_val =", "max_val = prices[i - 1] profit_list.append(max_val - min_val) tend =", "1] tend = 1 pass if tend == 1: profit_list.append(prices[i]", "1 pass if tend == 1: profit_list.append(prices[i] - min_val) return", "find the maximum profit. 
# You may complete as many", "= 1 pass if tend == 1: profit_list.append(prices[i] - min_val)", "- 1]: # go down if tend == 1: max_val", "an array for which the ith element is the price", "== 1: max_val = prices[i - 1] profit_list.append(max_val - min_val)", "pass if prices[i] > prices[i - 1]: # go up", "1: max_val = prices[i - 1] profit_list.append(max_val - min_val) tend", "in multiple transactions at the same time (ie, you must", "may complete as many transactions as you like (ie, buy", "times). # However, you may not engage in multiple transactions", "the stock multiple times). # However, you may not engage", "1] profit_list.append(max_val - min_val) tend = 0 pass if prices[i]", "0 profit_list = [] min_val = prices[0] max_val = prices[0]", "not engage in multiple transactions at the same time (ie,", "i. # # Design an algorithm to find the maximum", "is the price of a given stock on day i.", "complete as many transactions as you like (ie, buy one", "on day i. # # Design an algorithm to find", "0: min_val = prices[i - 1] tend = 1 pass", "the price of a given stock on day i. #", "transactions as you like (ie, buy one and sell one", "again). class Solution(object): def maxProfit(self, prices): \"\"\" :type prices: List[int]", "= 0 # 0:down, 1:up for i in range(1, len(prices)):", "for i in range(1, len(prices)): if prices[i] < prices[i -", "up if tend == 0: min_val = prices[i - 1]", "0 pass if prices[i] > prices[i - 1]: # go", "== 0: min_val = prices[i - 1] tend = 1", "# # Design an algorithm to find the maximum profit." ]
[ "response = self.client.get(url) response_address = response.json()['address'] self.assertEqual(response.status_code, 404) self.assertEqual(response_address, '')", "django.urls import reverse from .. import TestAdminMixin, TestLociMixin class BaseTestAdmin(TestAdminMixin,", "'coordinates': [12.512124, 41.898903]}, } self.assertEqual(content1, expected) @responses.activate def test_geocode(self): self._login_as_admin()", "address ) # Mock HTTP request to the URL to", "= 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def test_location_list(self): self._login_as_admin() self._create_location(name='test-admin-location-1') url = reverse('{0}_location_changelist'.format(self.url_prefix)) r", "def test_location_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl = self._create_floorplan(location=loc)", "test_is_mobile_location_json_view(self): self._login_as_admin() loc = self._create_location(is_mobile=True, geometry=None) response = self.client.get( reverse('admin:django_loci_location_json',", "test_geocode_invalid_address(self): self._login_as_admin() invalid_address = 'thisaddressisnotvalid123abc' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), invalid_address", "self.client.get( reverse('admin:django_loci_location_json', args=[loc.pk]) ) self.assertEqual(response.status_code, 200) content = json.loads(response.content) self.assertEqual(content['geometry'],", "responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode.json'), content_type='application/json', ) response = self.client.get(url) response_lat", "'&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json', ) response = self.client.get(url) expected = {'error':", "'Red Square' url = 
'{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), address ) # Mock", "self._login_as_admin() fl = self._create_floorplan() r = self.client.get( reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk]) )", "lat = -30 lng = -30 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'),", "r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_change_image_removed(self): self._login_as_admin() loc =", ") self.assertEqual(response1.status_code, 200) content1 = json.loads(response1.content) expected = { 'name':", "= self.client.get(url) expected = {'error': 'Address parameter not defined'} self.assertEqual(response.status_code,", "= self.client.get(url) self.assertEqual(response.status_code, 200) self.assertContains(response, 'POL') @responses.activate def test_reverse_location_with_no_address(self): self._login_as_admin()", "request to the URL to work offline responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1',", "expected = {'error': 'Not found location with given name'} self.assertEqual(response.status_code,", "lng ) responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content( 'base/static/test-reverse-location-with-no-address.json' ), content_type='application/json', )", "address='loc2 add', type='outdoor' ) response1 = self.client.get( reverse('admin:django_loci_location_json', args=[loc1.pk]) )", "self.assertEqual(content1, expected) @responses.activate def test_geocode(self): self._login_as_admin() address = 'Red Square'", "class BaseTestAdmin(TestAdminMixin, TestLociMixin): geocode_url = 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def test_location_list(self): self._login_as_admin() self._create_location(name='test-admin-location-1')", "BaseTestAdmin(TestAdminMixin, TestLociMixin): 
geocode_url = 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def test_location_list(self): self._login_as_admin() self._create_location(name='test-admin-location-1') url", "404) self.assertEqual(response.json(), expected) @responses.activate def test_reverse_geocode(self): self._login_as_admin() lat = 52", ") self.assertEqual(response.status_code, 200) content = json.loads(response.content) self.assertEqual(content['geometry'], None) loc1 =", "= reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_change_image_removed(self):", "lat, lng ) responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content( 'base/static/test-reverse-location-with-no-address.json' ), content_type='application/json',", "'test-admin-location-1') def test_floorplan_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl =", "loc = self._create_location(is_mobile=True, geometry=None) response = self.client.get( reverse('admin:django_loci_location_json', args=[loc.pk]) )", "self._login_as_admin() url = reverse('admin:django_loci_location_reverse_geocode_api') response = self.client.get(url) expected = {'error':", "self._create_location(is_mobile=True, geometry=None) response = self.client.get( reverse('admin:django_loci_location_json', args=[loc.pk]) ) self.assertEqual(response.status_code, 200)", "test_location_list(self): self._login_as_admin() self._create_location(name='test-admin-location-1') url = reverse('{0}_location_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r,", "= self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_list(self): self._login_as_admin() self._create_floorplan() self._create_location() url", "self._login_as_admin() address = 'Red 
Square' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), address", "self.assertEqual(response.status_code, 404) self.assertEqual(response_address, '') def test_reverse_geocode_no_coords(self): self._login_as_admin() url = reverse('admin:django_loci_location_reverse_geocode_api')", "'location2', 'address': 'loc2 add', 'type': 'outdoor', 'is_mobile': False, 'geometry': {'type':", "remove floorplan image os.remove(fl.image.path) url = reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk]) r =", "def test_floorplan_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl = self._create_floorplan(location=loc)", "'image_height': fl.image.height, } ] } self.assertDictEqual(r.json(), expected) def test_location_change_image_removed(self): self._login_as_admin()", "self.assertEqual(response.status_code, 200) self.assertEqual(response_lat, 56) self.assertEqual(response_lng, 38) def test_geocode_no_address(self): self._login_as_admin() url", "add', 'type': 'outdoor', 'is_mobile': False, 'geometry': {'type': 'Point', 'coordinates': [12.512124,", "not defined'} self.assertEqual(response.status_code, 400) self.assertEqual(response.json(), expected) @responses.activate def test_geocode_invalid_address(self): self._login_as_admin()", "'floor': fl.floor, 'image': fl.image.url, 'image_width': fl.image.width, 'image_height': fl.image.height, } ]", "loc.address, 'type': loc.type, 'is_mobile': loc.is_mobile, 'geometry': json.loads(loc.geometry.json), } self.assertDictEqual(r.json(), expected)", "f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json', ) response = self.client.get(url) expected =", "self.assertContains(r, '1st floor') def test_location_json_view(self): self._login_as_admin() loc = 
self._create_location() r", "self.client.get(url) self.assertContains(r, '1st floor') def test_location_json_view(self): self._login_as_admin() loc = self._create_location()", "lng = -30 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng )", "'test-admin-location-1') def test_is_mobile_location_json_view(self): self._login_as_admin() loc = self._create_location(is_mobile=True, geometry=None) response =", "loc = self._create_location(name='test-admin-location-1', type='indoor') fl = self._create_floorplan(location=loc) # remove floorplan", "args=[loc.pk])) expected = { 'name': loc.name, 'address': loc.address, 'type': loc.type,", "args=[fl.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_is_mobile_location_json_view(self): self._login_as_admin() loc", "given name'} self.assertEqual(response.status_code, 404) self.assertEqual(response.json(), expected) @responses.activate def test_reverse_geocode(self): self._login_as_admin()", "content_type='application/json', ) response = self.client.get(url) expected = {'error': 'Not found", "= self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_is_mobile_location_json_view(self): self._login_as_admin() loc = self._create_location(is_mobile=True,", "reverse('admin:django_loci_location_json', args=[loc.pk]) ) self.assertEqual(response.status_code, 200) content = json.loads(response.content) self.assertEqual(content['geometry'], None)", "response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertContains(response, 'POL') @responses.activate def test_reverse_location_with_no_address(self):", "'POL') @responses.activate def test_reverse_location_with_no_address(self): self._login_as_admin() lat = -30 lng =", "test_geocode_no_address(self): self._login_as_admin() url = reverse('admin:django_loci_location_geocode_api') response = self.client.get(url) expected =", "self._login_as_admin() 
self._create_location(name='test-admin-location-1') url = reverse('{0}_location_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1')", "TestLociMixin class BaseTestAdmin(TestAdminMixin, TestLociMixin): geocode_url = 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def test_location_list(self): self._login_as_admin()", "self.assertEqual(response.status_code, 200) content = json.loads(response.content) self.assertEqual(content['geometry'], None) loc1 = self._create_location(", "self._login_as_admin() url = reverse('admin:django_loci_location_geocode_api') response = self.client.get(url) expected = {'error':", "{ 'name': loc.name, 'address': loc.address, 'type': loc.type, 'is_mobile': loc.is_mobile, 'geometry':", "= reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, '1st floor') def test_location_json_view(self):", ") responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json', ) response =", "url = reverse('admin:django_loci_location_geocode_api') response = self.client.get(url) expected = {'error': 'Address", "self.client.get( reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk]) ) expected = { 'choices': [ {", "test_location_json_view(self): self._login_as_admin() loc = self._create_location() r = self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk])) expected", "add', type='outdoor' ) response1 = self.client.get( reverse('admin:django_loci_location_json', args=[loc1.pk]) ) self.assertEqual(response1.status_code,", ") expected = { 'choices': [ { 'id': str(fl.pk), 'str':", "= {'error': 'Not found location with given name'} self.assertEqual(response.status_code, 404)", 
"reverse('admin:django_loci_location_geocode_api') response = self.client.get(url) expected = {'error': 'Address parameter not", "= self.client.get( reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk]) ) expected = { 'choices': [", "self.assertEqual(content['geometry'], None) loc1 = self._create_location( name='location2', address='loc2 add', type='outdoor' )", "offline responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode.json'), content_type='application/json', ) response = self.client.get(url)", "test_geocode(self): self._login_as_admin() address = 'Red Square' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'),", "{ 'id': str(fl.pk), 'str': str(fl), 'floor': fl.floor, 'image': fl.image.url, 'image_width':", "expected = { 'choices': [ { 'id': str(fl.pk), 'str': str(fl),", "response_lng = round(response.json()['lng']) self.assertEqual(response.status_code, 200) self.assertEqual(response_lat, 56) self.assertEqual(response_lng, 38) def", "'choices': [ { 'id': str(fl.pk), 'str': str(fl), 'floor': fl.floor, 'image':", "reverse('admin:django_loci_location_json', args=[loc1.pk]) ) self.assertEqual(response1.status_code, 200) content1 = json.loads(response1.content) expected =", "lat, lng ) # Mock HTTP request to the URL", "'address': loc.address, 'type': loc.type, 'is_mobile': loc.is_mobile, 'geometry': json.loads(loc.geometry.json), } self.assertDictEqual(r.json(),", "self.assertDictEqual(r.json(), expected) def test_location_floorplan_json_view(self): self._login_as_admin() fl = self._create_floorplan() r =", "= self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk])) expected = { 'name': loc.name, 'address': loc.address,", "invalid_address ) responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1', 
body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json', ) response", "= self._create_floorplan(location=loc) # remove floorplan image os.remove(fl.image.path) url = reverse('{0}_floorplan_change'.format(self.url_prefix),", "response_address = response.json()['address'] self.assertEqual(response.status_code, 404) self.assertEqual(response_address, '') def test_reverse_geocode_no_coords(self): self._login_as_admin()", "responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json', ) response = self.client.get(url) self.assertEqual(response.status_code,", "import reverse from .. import TestAdminMixin, TestLociMixin class BaseTestAdmin(TestAdminMixin, TestLociMixin):", "the URL to work offline responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json',", "floorplan image os.remove(fl.image.path) url = reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk]) r = self.client.get(url)", "os.remove(fl.image.path) url = reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1')", "self.assertEqual(response_lat, 56) self.assertEqual(response_lng, 38) def test_geocode_no_address(self): self._login_as_admin() url = reverse('admin:django_loci_location_geocode_api')", ") response = self.client.get(url) response_lat = round(response.json()['lat']) response_lng = round(response.json()['lng'])", "'geometry': {'type': 'Point', 'coordinates': [12.512124, 41.898903]}, } self.assertEqual(content1, expected) @responses.activate", "= self.client.get(url) expected = {'error': 'Not found location with given", "@responses.activate def 
test_geocode_invalid_address(self): self._login_as_admin() invalid_address = 'thisaddressisnotvalid123abc' url = '{0}?address={1}'.format(", "name'} self.assertEqual(response.status_code, 404) self.assertEqual(response.json(), expected) @responses.activate def test_reverse_geocode(self): self._login_as_admin() lat", "def test_is_mobile_location_json_view(self): self._login_as_admin() loc = self._create_location(is_mobile=True, geometry=None) response = self.client.get(", "import json import os import responses from django.urls import reverse", ") response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertContains(response, 'POL') @responses.activate def", "self.assertEqual(response.json(), expected) @responses.activate def test_reverse_geocode(self): self._login_as_admin() lat = 52 lng", "self.assertContains(response, 'POL') @responses.activate def test_reverse_location_with_no_address(self): self._login_as_admin() lat = -30 lng", "reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_is_mobile_location_json_view(self): self._login_as_admin()", "-30 lng = -30 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng", "body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json', ) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertContains(response, 'POL')", "200) content = json.loads(response.content) self.assertEqual(content['geometry'], None) loc1 = self._create_location( name='location2',", "args=[fl.location.pk]) ) expected = { 'choices': [ { 'id': str(fl.pk),", "'lat or lng parameter not defined'} self.assertEqual(response.status_code, 400) self.assertEqual(response.json(), expected)", "floor') def test_location_json_view(self): self._login_as_admin() loc = self._create_location() r = 
self.client.get(reverse('admin:django_loci_location_json',", "'is_mobile': loc.is_mobile, 'geometry': json.loads(loc.geometry.json), } self.assertDictEqual(r.json(), expected) def test_location_floorplan_json_view(self): self._login_as_admin()", "json.loads(response.content) self.assertEqual(content['geometry'], None) loc1 = self._create_location( name='location2', address='loc2 add', type='outdoor'", "Square' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), address ) # Mock HTTP", "loc.type, 'is_mobile': loc.is_mobile, 'geometry': json.loads(loc.geometry.json), } self.assertDictEqual(r.json(), expected) def test_location_floorplan_json_view(self):", "'base/static/test-reverse-location-with-no-address.json' ), content_type='application/json', ) response = self.client.get(url) response_address = response.json()['address']", "[ { 'id': str(fl.pk), 'str': str(fl), 'floor': fl.floor, 'image': fl.image.url,", "content_type='application/json', ) response = self.client.get(url) response_lat = round(response.json()['lat']) response_lng =", "= -30 lng = -30 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat,", "url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326',", "def test_location_floorplan_json_view(self): self._login_as_admin() fl = self._create_floorplan() r = self.client.get( reverse('admin:django_loci_location_floorplans_json',", "'name': loc.name, 'address': loc.address, 'type': loc.type, 'is_mobile': loc.is_mobile, 'geometry': json.loads(loc.geometry.json),", "False, 'geometry': {'type': 'Point', 'coordinates': [12.512124, 41.898903]}, } self.assertEqual(content1, expected)", "test_reverse_geocode_no_coords(self): self._login_as_admin() url = reverse('admin:django_loci_location_reverse_geocode_api') response = 
self.client.get(url) expected =", "{ 'choices': [ { 'id': str(fl.pk), 'str': str(fl), 'floor': fl.floor,", "round(response.json()['lng']) self.assertEqual(response.status_code, 200) self.assertEqual(response_lat, 56) self.assertEqual(response_lng, 38) def test_geocode_no_address(self): self._login_as_admin()", "response = self.client.get(url) expected = {'error': 'Address parameter not defined'}", "'image': fl.image.url, 'image_width': fl.image.width, 'image_height': fl.image.height, } ] } self.assertDictEqual(r.json(),", "= self._create_location(is_mobile=True, geometry=None) response = self.client.get( reverse('admin:django_loci_location_json', args=[loc.pk]) ) self.assertEqual(response.status_code,", "image os.remove(fl.image.path) url = reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk]) r = self.client.get(url) self.assertContains(r,", "lng = 21 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng )", "str(fl.pk), 'str': str(fl), 'floor': fl.floor, 'image': fl.image.url, 'image_width': fl.image.width, 'image_height':", "= 21 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) #", "self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor')", "self._create_location() r = self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk])) expected = { 'name': loc.name,", "os.remove(fl.image.path) url = reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1')", "test_floorplan_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl = self._create_floorplan(location=loc) #", "self._create_floorplan(location=loc) # remove floorplan 
image os.remove(fl.image.path) url = reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk])", "= { 'name': loc.name, 'address': loc.address, 'type': loc.type, 'is_mobile': loc.is_mobile,", "= round(response.json()['lng']) self.assertEqual(response.status_code, 200) self.assertEqual(response_lat, 56) self.assertEqual(response_lng, 38) def test_geocode_no_address(self):", "self.assertContains(r, 'test-admin-location-1') def test_is_mobile_location_json_view(self): self._login_as_admin() loc = self._create_location(is_mobile=True, geometry=None) response", "json.loads(response1.content) expected = { 'name': 'location2', 'address': 'loc2 add', 'type':", "os import responses from django.urls import reverse from .. import", "] } self.assertDictEqual(r.json(), expected) def test_location_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1',", "52 lng = 21 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng", "-30 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) responses.add( responses.GET,", "= '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), address ) # Mock HTTP request to", "self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_is_mobile_location_json_view(self): self._login_as_admin() loc = self._create_location(is_mobile=True, geometry=None)", "reverse('admin:django_loci_location_geocode_api'), address ) # Mock HTTP request to the URL", "= self.client.get(url) self.assertContains(r, '1st floor') def test_location_json_view(self): self._login_as_admin() loc =", "Mock HTTP request to the URL to work offline responses.add(", "= self._create_floorplan() r = self.client.get( reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk]) ) expected =", "} ] } self.assertDictEqual(r.json(), expected) def 
test_location_change_image_removed(self): self._login_as_admin() loc =", "url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), address ) # Mock HTTP request", "def test_floorplan_list(self): self._login_as_admin() self._create_floorplan() self._create_location() url = reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r =", "'address': 'loc2 add', 'type': 'outdoor', 'is_mobile': False, 'geometry': {'type': 'Point',", "@responses.activate def test_reverse_geocode(self): self._login_as_admin() lat = 52 lng = 21", "to the URL to work offline responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'),", "'type': loc.type, 'is_mobile': loc.is_mobile, 'geometry': json.loads(loc.geometry.json), } self.assertDictEqual(r.json(), expected) def", "fl.image.url, 'image_width': fl.image.width, 'image_height': fl.image.height, } ] } self.assertDictEqual(r.json(), expected)", "'{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content( 'base/static/test-reverse-location-with-no-address.json'", "= json.loads(response.content) self.assertEqual(content['geometry'], None) loc1 = self._create_location( name='location2', address='loc2 add',", "found location with given name'} self.assertEqual(response.status_code, 404) self.assertEqual(response.json(), expected) @responses.activate", "request to the URL to work offline responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326',", ") responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content( 'base/static/test-reverse-location-with-no-address.json' ), content_type='application/json', ) response", 
"self._create_floorplan() r = self.client.get( reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk]) ) expected = {", "{'error': 'Not found location with given name'} self.assertEqual(response.status_code, 404) self.assertEqual(response.json(),", ") response = self.client.get(url) expected = {'error': 'Not found location", "= -30 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) responses.add(", "def test_reverse_geocode_no_coords(self): self._login_as_admin() url = reverse('admin:django_loci_location_reverse_geocode_api') response = self.client.get(url) expected", "url = reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def", "import responses from django.urls import reverse from .. import TestAdminMixin,", "from django.urls import reverse from .. import TestAdminMixin, TestLociMixin class", "r = self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk])) expected = { 'name': loc.name, 'address':", "response = self.client.get(url) expected = {'error': 'Not found location with", "address = 'Red Square' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), address )", "json.loads(loc.geometry.json), } self.assertDictEqual(r.json(), expected) def test_location_floorplan_json_view(self): self._login_as_admin() fl = self._create_floorplan()", "<filename>django_loci/tests/base/test_admin.py import json import os import responses from django.urls import", "content_type='application/json', ) response = self.client.get(url) response_address = response.json()['address'] self.assertEqual(response.status_code, 404)", "test_floorplan_list(self): self._login_as_admin() self._create_floorplan() self._create_location() url = reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r = self.client.get(url)", "'loc2 add', 'type': 'outdoor', 'is_mobile': False, 
'geometry': {'type': 'Point', 'coordinates':", "reverse('admin:django_loci_location_reverse_geocode_api') response = self.client.get(url) expected = {'error': 'lat or lng", "None) loc1 = self._create_location( name='location2', address='loc2 add', type='outdoor' ) response1", "content1 = json.loads(response1.content) expected = { 'name': 'location2', 'address': 'loc2", "self.client.get(url) expected = {'error': 'lat or lng parameter not defined'}", "'Point', 'coordinates': [12.512124, 41.898903]}, } self.assertEqual(content1, expected) @responses.activate def test_geocode(self):", "responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json', ) response = self.client.get(url) self.assertEqual(response.status_code, 200)", "'is_mobile': False, 'geometry': {'type': 'Point', 'coordinates': [12.512124, 41.898903]}, } self.assertEqual(content1,", "self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl = self._create_floorplan(location=loc) # remove", "= self.client.get( reverse('admin:django_loci_location_json', args=[loc.pk]) ) self.assertEqual(response.status_code, 200) content = json.loads(response.content)", "200) self.assertEqual(response_lat, 56) self.assertEqual(response_lng, 38) def test_geocode_no_address(self): self._login_as_admin() url =", "200) content1 = json.loads(response1.content) expected = { 'name': 'location2', 'address':", "# Mock HTTP request to the URL to work offline", "geocode_url = 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def test_location_list(self): self._login_as_admin() self._create_location(name='test-admin-location-1') url = reverse('{0}_location_changelist'.format(self.url_prefix))", "self.assertEqual(response_address, '') def test_reverse_geocode_no_coords(self): self._login_as_admin() url = 
reverse('admin:django_loci_location_reverse_geocode_api') response =", "# remove floorplan image os.remove(fl.image.path) url = reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk]) r", "loc = self._create_location() r = self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk])) expected = {", "type='outdoor' ) response1 = self.client.get( reverse('admin:django_loci_location_json', args=[loc1.pk]) ) self.assertEqual(response1.status_code, 200)", "url = reverse('{0}_location_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_list(self):", "url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), invalid_address ) responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1',", "expected) @responses.activate def test_reverse_geocode(self): self._login_as_admin() lat = 52 lng =", "= reverse('{0}_location_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_list(self): self._login_as_admin()", "self._login_as_admin() self._create_floorplan() self._create_location() url = reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r,", "work offline responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json', ) response =", "content_type='application/json', ) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertContains(response, 'POL') @responses.activate", "fl = self._create_floorplan() r = self.client.get( reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk]) ) expected", "= self._create_floorplan(location=loc) # remove floorplan image 
os.remove(fl.image.path) url = reverse('{0}_location_change'.format(self.url_prefix),", "self.assertEqual(response.status_code, 200) self.assertContains(response, 'POL') @responses.activate def test_reverse_location_with_no_address(self): self._login_as_admin() lat =", "41.898903]}, } self.assertEqual(content1, expected) @responses.activate def test_geocode(self): self._login_as_admin() address =", "} self.assertEqual(content1, expected) @responses.activate def test_geocode(self): self._login_as_admin() address = 'Red", "r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_list(self): self._login_as_admin() self._create_floorplan() self._create_location()", "= reverse('admin:django_loci_location_geocode_api') response = self.client.get(url) expected = {'error': 'Address parameter", "[12.512124, 41.898903]}, } self.assertEqual(content1, expected) @responses.activate def test_geocode(self): self._login_as_admin() address", "= response.json()['address'] self.assertEqual(response.status_code, 404) self.assertEqual(response_address, '') def test_reverse_geocode_no_coords(self): self._login_as_admin() url", "test_location_floorplan_json_view(self): self._login_as_admin() fl = self._create_floorplan() r = self.client.get( reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk])", "responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json', ) response = self.client.get(url)", "reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_change_image_removed(self): self._login_as_admin()", "body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json', ) response = self.client.get(url) 
expected = {'error': 'Not", "expected) def test_location_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl =", "f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode.json'), content_type='application/json', ) response = self.client.get(url) response_lat = round(response.json()['lat'])", "body=self._load_content( 'base/static/test-reverse-location-with-no-address.json' ), content_type='application/json', ) response = self.client.get(url) response_address =", "reverse('admin:django_loci_location_geocode_api'), invalid_address ) responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json', )", "responses from django.urls import reverse from .. import TestAdminMixin, TestLociMixin", "fl.image.height, } ] } self.assertDictEqual(r.json(), expected) def test_location_change_image_removed(self): self._login_as_admin() loc", "21 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) # Mock", "= { 'name': 'location2', 'address': 'loc2 add', 'type': 'outdoor', 'is_mobile':", "self._create_floorplan(location=loc) # remove floorplan image os.remove(fl.image.path) url = reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk])", "self.assertContains(r, 'test-admin-location-1') def test_floorplan_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl", "URL to work offline responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json', )", "= self.client.get( 
reverse('admin:django_loci_location_json', args=[loc1.pk]) ) self.assertEqual(response1.status_code, 200) content1 = json.loads(response1.content)", ".. import TestAdminMixin, TestLociMixin class BaseTestAdmin(TestAdminMixin, TestLociMixin): geocode_url = 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/'", "response_lat = round(response.json()['lat']) response_lng = round(response.json()['lng']) self.assertEqual(response.status_code, 200) self.assertEqual(response_lat, 56)", "self._login_as_admin() loc = self._create_location(is_mobile=True, geometry=None) response = self.client.get( reverse('admin:django_loci_location_json', args=[loc.pk])", "with given name'} self.assertEqual(response.status_code, 404) self.assertEqual(response.json(), expected) @responses.activate def test_reverse_geocode(self):", "} self.assertDictEqual(r.json(), expected) def test_location_floorplan_json_view(self): self._login_as_admin() fl = self._create_floorplan() r", "# remove floorplan image os.remove(fl.image.path) url = reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk]) r", "self.assertEqual(response.json(), expected) @responses.activate def test_geocode_invalid_address(self): self._login_as_admin() invalid_address = 'thisaddressisnotvalid123abc' url", "r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_is_mobile_location_json_view(self): self._login_as_admin() loc =", "def test_location_list(self): self._login_as_admin() self._create_location(name='test-admin-location-1') url = reverse('{0}_location_changelist'.format(self.url_prefix)) r = self.client.get(url)", "floorplan image os.remove(fl.image.path) url = reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk]) r = self.client.get(url)", "r = self.client.get(url) self.assertContains(r, '1st floor') def test_location_json_view(self): self._login_as_admin() loc", "lng ) # Mock HTTP request to the URL to", "url = '{0}?lat={1}&lng={2}'.format( 
reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) # Mock HTTP", "self.assertEqual(response.status_code, 400) self.assertEqual(response.json(), expected) @responses.activate def test_geocode_invalid_address(self): self._login_as_admin() invalid_address =", "= self.client.get(url) expected = {'error': 'lat or lng parameter not", "TestLociMixin): geocode_url = 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def test_location_list(self): self._login_as_admin() self._create_location(name='test-admin-location-1') url =", "reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content( 'base/static/test-reverse-location-with-no-address.json' ),", "), content_type='application/json', ) response = self.client.get(url) response_address = response.json()['address'] self.assertEqual(response.status_code,", "= '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) # Mock HTTP request", "reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, '1st floor') def test_location_json_view(self): self._login_as_admin()", "= self.client.get(url) response_lat = round(response.json()['lat']) response_lng = round(response.json()['lng']) self.assertEqual(response.status_code, 200)", "lat = 52 lng = 21 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'),", "response1 = self.client.get( reverse('admin:django_loci_location_json', args=[loc1.pk]) ) self.assertEqual(response1.status_code, 200) content1 =", "to work offline responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json', ) response", "= '{0}?lat={1}&lng={2}'.format( 
reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content(", "self.client.get(url) response_lat = round(response.json()['lat']) response_lng = round(response.json()['lng']) self.assertEqual(response.status_code, 200) self.assertEqual(response_lat,", "fl.image.width, 'image_height': fl.image.height, } ] } self.assertDictEqual(r.json(), expected) def test_location_change_image_removed(self):", "url = reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, '1st floor') def", "test_location_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl = self._create_floorplan(location=loc) #", "38) def test_geocode_no_address(self): self._login_as_admin() url = reverse('admin:django_loci_location_geocode_api') response = self.client.get(url)", "remove floorplan image os.remove(fl.image.path) url = reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk]) r =", "def test_reverse_location_with_no_address(self): self._login_as_admin() lat = -30 lng = -30 url", "invalid_address = 'thisaddressisnotvalid123abc' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), invalid_address ) responses.add(", "{ 'name': 'location2', 'address': 'loc2 add', 'type': 'outdoor', 'is_mobile': False,", "parameter not defined'} self.assertEqual(response.status_code, 400) self.assertEqual(response.json(), expected) @responses.activate def test_geocode_invalid_address(self):", "'test-admin-location-1') def test_floorplan_list(self): self._login_as_admin() self._create_floorplan() self._create_location() url = reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r", "str(fl), 'floor': fl.floor, 'image': fl.image.url, 'image_width': fl.image.width, 'image_height': fl.image.height, }", ") # Mock 
HTTP request to the URL to work", "loc.is_mobile, 'geometry': json.loads(loc.geometry.json), } self.assertDictEqual(r.json(), expected) def test_location_floorplan_json_view(self): self._login_as_admin() fl", "reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk]) ) expected = { 'choices': [ { 'id':", "self.client.get(url) expected = {'error': 'Not found location with given name'}", "work offline responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode.json'), content_type='application/json', ) response =", "@responses.activate def test_geocode(self): self._login_as_admin() address = 'Red Square' url =", "expected) @responses.activate def test_geocode_invalid_address(self): self._login_as_admin() invalid_address = 'thisaddressisnotvalid123abc' url =", "import os import responses from django.urls import reverse from ..", "= self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1',", "image os.remove(fl.image.path) url = reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk]) r = self.client.get(url) self.assertContains(r,", "= self._create_location( name='location2', address='loc2 add', type='outdoor' ) response1 = self.client.get(", "self._create_location() url = reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, '1st floor')", "= json.loads(response1.content) expected = { 'name': 'location2', 'address': 'loc2 add',", "def test_location_json_view(self): self._login_as_admin() loc = self._create_location() r = self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk]))", "expected = {'error': 'lat or lng parameter not defined'} self.assertEqual(response.status_code,", "name='location2', address='loc2 add', 
type='outdoor' ) response1 = self.client.get( reverse('admin:django_loci_location_json', args=[loc1.pk])", "= {'error': 'lat or lng parameter not defined'} self.assertEqual(response.status_code, 400)", "'Address parameter not defined'} self.assertEqual(response.status_code, 400) self.assertEqual(response.json(), expected) @responses.activate def", "f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content( 'base/static/test-reverse-location-with-no-address.json' ), content_type='application/json', ) response = self.client.get(url) response_address", "56) self.assertEqual(response_lng, 38) def test_geocode_no_address(self): self._login_as_admin() url = reverse('admin:django_loci_location_geocode_api') response", "self._create_location( name='location2', address='loc2 add', type='outdoor' ) response1 = self.client.get( reverse('admin:django_loci_location_json',", "self._create_floorplan() self._create_location() url = reverse('{0}_floorplan_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, '1st", "self.assertEqual(response.status_code, 404) self.assertEqual(response.json(), expected) @responses.activate def test_reverse_geocode(self): self._login_as_admin() lat =", "'{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), invalid_address ) responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json',", "responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode-invalid-address.json'), content_type='application/json', ) response = self.client.get(url) expected", "loc.name, 'address': loc.address, 'type': loc.type, 'is_mobile': loc.is_mobile, 'geometry': 
json.loads(loc.geometry.json), }", "reverse('{0}_location_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_list(self): self._login_as_admin() self._create_floorplan()", "reverse from .. import TestAdminMixin, TestLociMixin class BaseTestAdmin(TestAdminMixin, TestLociMixin): geocode_url", "} self.assertDictEqual(r.json(), expected) def test_location_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor')", "expected = { 'name': 'location2', 'address': 'loc2 add', 'type': 'outdoor',", "self._login_as_admin() lat = -30 lng = -30 url = '{0}?lat={1}&lng={2}'.format(", "def test_geocode_invalid_address(self): self._login_as_admin() invalid_address = 'thisaddressisnotvalid123abc' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'),", "'outdoor', 'is_mobile': False, 'geometry': {'type': 'Point', 'coordinates': [12.512124, 41.898903]}, }", "reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) # Mock HTTP request to the", ") response1 = self.client.get( reverse('admin:django_loci_location_json', args=[loc1.pk]) ) self.assertEqual(response1.status_code, 200) content1", "= self._create_location() r = self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk])) expected = { 'name':", "TestAdminMixin, TestLociMixin class BaseTestAdmin(TestAdminMixin, TestLociMixin): geocode_url = 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def test_location_list(self):", "'{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng ) # Mock HTTP request to", "location with given name'} self.assertEqual(response.status_code, 404) self.assertEqual(response.json(), expected) @responses.activate def", "'id': str(fl.pk), 'str': str(fl), 'floor': fl.floor, 'image': fl.image.url, 'image_width': fl.image.width,", 
"body=self._load_content('base/static/test-geocode.json'), content_type='application/json', ) response = self.client.get(url) response_lat = round(response.json()['lat']) response_lng", "'thisaddressisnotvalid123abc' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), invalid_address ) responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc'", "= {'error': 'Address parameter not defined'} self.assertEqual(response.status_code, 400) self.assertEqual(response.json(), expected)", "{'type': 'Point', 'coordinates': [12.512124, 41.898903]}, } self.assertEqual(content1, expected) @responses.activate def", "args=[loc.pk]) ) self.assertEqual(response.status_code, 200) content = json.loads(response.content) self.assertEqual(content['geometry'], None) loc1", "responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode.json'), content_type='application/json', ) response = self.client.get(url) response_lat =", "self.client.get(url) response_address = response.json()['address'] self.assertEqual(response.status_code, 404) self.assertEqual(response_address, '') def test_reverse_geocode_no_coords(self):", "response = self.client.get(url) expected = {'error': 'lat or lng parameter", "= self.client.get(url) response_address = response.json()['address'] self.assertEqual(response.status_code, 404) self.assertEqual(response_address, '') def", "loc1 = self._create_location( name='location2', address='loc2 add', type='outdoor' ) response1 =", "'Not found location with given name'} self.assertEqual(response.status_code, 404) self.assertEqual(response.json(), expected)", "geometry=None) response = self.client.get( reverse('admin:django_loci_location_json', args=[loc.pk]) ) self.assertEqual(response.status_code, 200) content", "from .. 
import TestAdminMixin, TestLociMixin class BaseTestAdmin(TestAdminMixin, TestLociMixin): geocode_url =", "'image_width': fl.image.width, 'image_height': fl.image.height, } ] } self.assertDictEqual(r.json(), expected) def", "self._create_location(name='test-admin-location-1') url = reverse('{0}_location_changelist'.format(self.url_prefix)) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def", "args=[loc.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_change_image_removed(self): self._login_as_admin() loc", "type='indoor') fl = self._create_floorplan(location=loc) # remove floorplan image os.remove(fl.image.path) url", "= { 'choices': [ { 'id': str(fl.pk), 'str': str(fl), 'floor':", "f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json', ) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertContains(response,", "= reverse('admin:django_loci_location_reverse_geocode_api') response = self.client.get(url) expected = {'error': 'lat or", "def test_geocode_no_address(self): self._login_as_admin() url = reverse('admin:django_loci_location_geocode_api') response = self.client.get(url) expected", "expected) def test_location_floorplan_json_view(self): self._login_as_admin() fl = self._create_floorplan() r = self.client.get(", "= '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), invalid_address ) responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc' '&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode-invalid-address.json'),", "'type': 'outdoor', 'is_mobile': False, 'geometry': {'type': 'Point', 'coordinates': [12.512124, 41.898903]},", "{'error': 'Address parameter not defined'} self.assertEqual(response.status_code, 400) 
self.assertEqual(response.json(), expected) @responses.activate", "'geometry': json.loads(loc.geometry.json), } self.assertDictEqual(r.json(), expected) def test_location_floorplan_json_view(self): self._login_as_admin() fl =", "responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content( 'base/static/test-reverse-location-with-no-address.json' ), content_type='application/json', ) response = self.client.get(url)", "self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk])) expected = { 'name': loc.name, 'address': loc.address, 'type':", "round(response.json()['lat']) response_lng = round(response.json()['lng']) self.assertEqual(response.status_code, 200) self.assertEqual(response_lat, 56) self.assertEqual(response_lng, 38)", "'str': str(fl), 'floor': fl.floor, 'image': fl.image.url, 'image_width': fl.image.width, 'image_height': fl.image.height,", "200) self.assertContains(response, 'POL') @responses.activate def test_reverse_location_with_no_address(self): self._login_as_admin() lat = -30", "@responses.activate def test_reverse_location_with_no_address(self): self._login_as_admin() lat = -30 lng = -30", "expected = { 'name': loc.name, 'address': loc.address, 'type': loc.type, 'is_mobile':", "self.client.get(url) self.assertEqual(response.status_code, 200) self.assertContains(response, 'POL') @responses.activate def test_reverse_location_with_no_address(self): self._login_as_admin() lat", "to the URL to work offline responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode.json'),", "self.client.get( reverse('admin:django_loci_location_json', args=[loc1.pk]) ) self.assertEqual(response1.status_code, 200) content1 = json.loads(response1.content) expected", "import TestAdminMixin, TestLociMixin class BaseTestAdmin(TestAdminMixin, TestLociMixin): geocode_url = 
'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def", "'{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), address ) # Mock HTTP request to the", "test_reverse_location_with_no_address(self): self._login_as_admin() lat = -30 lng = -30 url =", "self._login_as_admin() loc = self._create_location() r = self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk])) expected =", "response.json()['address'] self.assertEqual(response.status_code, 404) self.assertEqual(response_address, '') def test_reverse_geocode_no_coords(self): self._login_as_admin() url =", "def test_geocode(self): self._login_as_admin() address = 'Red Square' url = '{0}?address={1}'.format(", "self.client.get(url) expected = {'error': 'Address parameter not defined'} self.assertEqual(response.status_code, 400)", "self._create_location(name='test-admin-location-1', type='indoor') fl = self._create_floorplan(location=loc) # remove floorplan image os.remove(fl.image.path)", "'1st floor') def test_location_json_view(self): self._login_as_admin() loc = self._create_location() r =", "offline responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326', body=self._load_content('base/static/test-reverse-geocode.json'), content_type='application/json', ) response = self.client.get(url)", "test_reverse_geocode(self): self._login_as_admin() lat = 52 lng = 21 url =", "= reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_is_mobile_location_json_view(self):", "self.assertContains(r, 'test-admin-location-1') def test_floorplan_list(self): self._login_as_admin() self._create_floorplan() self._create_location() url = reverse('{0}_floorplan_changelist'.format(self.url_prefix))", "defined'} self.assertEqual(response.status_code, 400) self.assertEqual(response.json(), expected) @responses.activate def 
test_geocode_invalid_address(self): self._login_as_admin() invalid_address", "{'error': 'lat or lng parameter not defined'} self.assertEqual(response.status_code, 400) self.assertEqual(response.json(),", "= 52 lng = 21 url = '{0}?lat={1}&lng={2}'.format( reverse('admin:django_loci_location_reverse_geocode_api'), lat,", "fl.floor, 'image': fl.image.url, 'image_width': fl.image.width, 'image_height': fl.image.height, } ] }", "content = json.loads(response.content) self.assertEqual(content['geometry'], None) loc1 = self._create_location( name='location2', address='loc2", "'name': 'location2', 'address': 'loc2 add', 'type': 'outdoor', 'is_mobile': False, 'geometry':", "self._login_as_admin() lat = 52 lng = 21 url = '{0}?lat={1}&lng={2}'.format(", "404) self.assertEqual(response_address, '') def test_reverse_geocode_no_coords(self): self._login_as_admin() url = reverse('admin:django_loci_location_reverse_geocode_api') response", "self.client.get(url) self.assertContains(r, 'test-admin-location-1') def test_floorplan_list(self): self._login_as_admin() self._create_floorplan() self._create_location() url =", "response = self.client.get(url) response_lat = round(response.json()['lat']) response_lng = round(response.json()['lng']) self.assertEqual(response.status_code,", "fl = self._create_floorplan(location=loc) # remove floorplan image os.remove(fl.image.path) url =", "= 'Red Square' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), address ) #", "args=[loc1.pk]) ) self.assertEqual(response1.status_code, 200) content1 = json.loads(response1.content) expected = {", "def test_reverse_geocode(self): self._login_as_admin() lat = 52 lng = 21 url", "self.assertEqual(response1.status_code, 200) content1 = json.loads(response1.content) expected = { 'name': 'location2',", "the URL to work offline responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', 
body=self._load_content('base/static/test-geocode.json'), content_type='application/json',", "'') def test_reverse_geocode_no_coords(self): self._login_as_admin() url = reverse('admin:django_loci_location_reverse_geocode_api') response = self.client.get(url)", "self._login_as_admin() invalid_address = 'thisaddressisnotvalid123abc' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), invalid_address )", "= round(response.json()['lat']) response_lng = round(response.json()['lng']) self.assertEqual(response.status_code, 200) self.assertEqual(response_lat, 56) self.assertEqual(response_lng,", "400) self.assertEqual(response.json(), expected) @responses.activate def test_geocode_invalid_address(self): self._login_as_admin() invalid_address = 'thisaddressisnotvalid123abc'", "self.assertEqual(response_lng, 38) def test_geocode_no_address(self): self._login_as_admin() url = reverse('admin:django_loci_location_geocode_api') response =", "= 'thisaddressisnotvalid123abc' url = '{0}?address={1}'.format( reverse('admin:django_loci_location_geocode_api'), invalid_address ) responses.add( responses.GET,", "= self._create_location(name='test-admin-location-1', type='indoor') fl = self._create_floorplan(location=loc) # remove floorplan image", "self.assertDictEqual(r.json(), expected) def test_location_change_image_removed(self): self._login_as_admin() loc = self._create_location(name='test-admin-location-1', type='indoor') fl", "to work offline responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode.json'), content_type='application/json', ) response", "r = self.client.get( reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk]) ) expected = { 'choices':", "expected) @responses.activate def test_geocode(self): self._login_as_admin() address = 'Red Square' url", "expected = {'error': 'Address parameter not defined'} 
self.assertEqual(response.status_code, 400) self.assertEqual(response.json(),", "json import os import responses from django.urls import reverse from", "URL to work offline responses.add( responses.GET, f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1', body=self._load_content('base/static/test-geocode.json'), content_type='application/json', )", "url = reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk]) r = self.client.get(url) self.assertContains(r, 'test-admin-location-1') def", "response = self.client.get( reverse('admin:django_loci_location_json', args=[loc.pk]) ) self.assertEqual(response.status_code, 200) content =", "HTTP request to the URL to work offline responses.add( responses.GET,", "'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/' def test_location_list(self): self._login_as_admin() self._create_location(name='test-admin-location-1') url = reverse('{0}_location_changelist'.format(self.url_prefix)) r =", "url = reverse('admin:django_loci_location_reverse_geocode_api') response = self.client.get(url) expected = {'error': 'lat", "responses.add( responses.GET, f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326', body=self._load_content( 'base/static/test-reverse-location-with-no-address.json' ), content_type='application/json', ) response =", ") response = self.client.get(url) response_address = response.json()['address'] self.assertEqual(response.status_code, 404) self.assertEqual(response_address," ]
[ "C, 1, C) _, locations = torch.topk(cov_matrix, self.n_offsets, dim=1) delta", "height, width = x.size() x = x - x.mean(dim=1, keepdim=True)", "\"\"\" m_batchsize, C, height, width = x.size() x = x", "m_batchsize, C, height, width = x.size() cov_matrix = self.covariance_features(x).reshape(m_batchsize, C,", "= x.size() x = x - x.mean(dim=1, keepdim=True) / (x.std(dim=1,", "channel, n_offsets): super(dca_offsets_layer, self).__init__() self.channel = channel self.n_offsets = n_offsets", "torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0) delta = torch.stack(m_batchsize * [delta], dim=0) offsets", "C, height, width = x.size() x = x - x.mean(dim=1,", "height, width = x.size() cov_matrix = self.covariance_features(x).reshape(m_batchsize, C, 1, C)", "x): \"\"\" Takes in a feature map and returns the", "\"\"\" Takes in a feature map and returns the unnormalized", "__init__(self, channel, n_offsets): super(dca_offsets_layer, self).__init__() self.channel = channel self.n_offsets =", "self).__init__() self.channel = channel self.n_offsets = n_offsets def covariance_features(self, x):", "= channel self.n_offsets = n_offsets def covariance_features(self, x): \"\"\" Takes", "keepdim=True) + 1e-5) proj_query = x.view(m_batchsize, C, -1) proj_key =", "proj_query = x.view(m_batchsize, C, -1) proj_key = x.view(m_batchsize, C, -1).permute(0,", "_, locations = torch.topk(cov_matrix, self.n_offsets, dim=1) delta = torch.stack(self.n_offsets*[torch.arange(0, self.channel)],", "def __init__(self, channel, n_offsets): super(dca_offsets_layer, self).__init__() self.channel = channel self.n_offsets", "= self.covariance_features(x).reshape(m_batchsize, C, 1, C) _, locations = torch.topk(cov_matrix, self.n_offsets,", "in a feature map and returns the unnormalized covariance matrix", "import rearrange, reduce, repeat class dca_offsets_layer(nn.Module): \"\"\"Constructs a Offset Generation", "energy def forward(self, x): m_batchsize, C, height, width = x.size()", "delta = 
torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0) delta = torch.stack(m_batchsize * [delta],", "from torch.nn.parameter import Parameter from einops import rearrange, reduce, repeat", "cov_matrix = self.covariance_features(x).reshape(m_batchsize, C, 1, C) _, locations = torch.topk(cov_matrix,", "x - x.mean(dim=1, keepdim=True) / (x.std(dim=1, keepdim=True) + 1e-5) proj_query", "-1) proj_key = x.view(m_batchsize, C, -1).permute(0, 2, 1) energy =", "def covariance_features(self, x): \"\"\" Takes in a feature map and", "/ (x.std(dim=1, keepdim=True) + 1e-5) proj_query = x.view(m_batchsize, C, -1)", "returns the unnormalized covariance matrix \"\"\" m_batchsize, C, height, width", "width = x.size() x = x - x.mean(dim=1, keepdim=True) /", "= torch.bmm(proj_query, proj_key) return energy def forward(self, x): m_batchsize, C,", "C, -1) proj_key = x.view(m_batchsize, C, -1).permute(0, 2, 1) energy", "(x.std(dim=1, keepdim=True) + 1e-5) proj_query = x.view(m_batchsize, C, -1) proj_key", "Generation module. \"\"\" def __init__(self, channel, n_offsets): super(dca_offsets_layer, self).__init__() self.channel", "\"\"\"Constructs a Offset Generation module. \"\"\" def __init__(self, channel, n_offsets):", "= x - x.mean(dim=1, keepdim=True) / (x.std(dim=1, keepdim=True) + 1e-5)", "C, -1).permute(0, 2, 1) energy = torch.bmm(proj_query, proj_key) return energy", "covariance matrix \"\"\" m_batchsize, C, height, width = x.size() x", "self.channel = channel self.n_offsets = n_offsets def covariance_features(self, x): \"\"\"", "width = x.size() cov_matrix = self.covariance_features(x).reshape(m_batchsize, C, 1, C) _,", "Parameter from einops import rearrange, reduce, repeat class dca_offsets_layer(nn.Module): \"\"\"Constructs", "forward(self, x): m_batchsize, C, height, width = x.size() cov_matrix =", "return energy def forward(self, x): m_batchsize, C, height, width =", "repeat class dca_offsets_layer(nn.Module): \"\"\"Constructs a Offset Generation module. 
\"\"\" def", "matrix \"\"\" m_batchsize, C, height, width = x.size() x =", "+ 1e-5) proj_query = x.view(m_batchsize, C, -1) proj_key = x.view(m_batchsize,", "dim=0) delta = torch.stack(m_batchsize * [delta], dim=0) offsets = locations.squeeze()", "x.size() x = x - x.mean(dim=1, keepdim=True) / (x.std(dim=1, keepdim=True)", "x.view(m_batchsize, C, -1) proj_key = x.view(m_batchsize, C, -1).permute(0, 2, 1)", "dim=1) delta = torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0) delta = torch.stack(m_batchsize *", "* [delta], dim=0) offsets = locations.squeeze() - delta.cuda() return offsets", "self.covariance_features(x).reshape(m_batchsize, C, 1, C) _, locations = torch.topk(cov_matrix, self.n_offsets, dim=1)", "energy = torch.bmm(proj_query, proj_key) return energy def forward(self, x): m_batchsize,", "unnormalized covariance matrix \"\"\" m_batchsize, C, height, width = x.size()", "covariance_features(self, x): \"\"\" Takes in a feature map and returns", "locations = torch.topk(cov_matrix, self.n_offsets, dim=1) delta = torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0)", "2, 1) energy = torch.bmm(proj_query, proj_key) return energy def forward(self,", "import Parameter from einops import rearrange, reduce, repeat class dca_offsets_layer(nn.Module):", "x = x - x.mean(dim=1, keepdim=True) / (x.std(dim=1, keepdim=True) +", "self.n_offsets, dim=1) delta = torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0) delta = torch.stack(m_batchsize", "the unnormalized covariance matrix \"\"\" m_batchsize, C, height, width =", "proj_key) return energy def forward(self, x): m_batchsize, C, height, width", "- x.mean(dim=1, keepdim=True) / (x.std(dim=1, keepdim=True) + 1e-5) proj_query =", "reduce, repeat class dca_offsets_layer(nn.Module): \"\"\"Constructs a Offset Generation module. 
\"\"\"", "x.view(m_batchsize, C, -1).permute(0, 2, 1) energy = torch.bmm(proj_query, proj_key) return", "proj_key = x.view(m_batchsize, C, -1).permute(0, 2, 1) energy = torch.bmm(proj_query,", "import torch from torch import nn from torch.nn.parameter import Parameter", "x.mean(dim=1, keepdim=True) / (x.std(dim=1, keepdim=True) + 1e-5) proj_query = x.view(m_batchsize,", "m_batchsize, C, height, width = x.size() x = x -", "dca_offsets_layer(nn.Module): \"\"\"Constructs a Offset Generation module. \"\"\" def __init__(self, channel,", "super(dca_offsets_layer, self).__init__() self.channel = channel self.n_offsets = n_offsets def covariance_features(self,", "a feature map and returns the unnormalized covariance matrix \"\"\"", "class dca_offsets_layer(nn.Module): \"\"\"Constructs a Offset Generation module. \"\"\" def __init__(self,", "from torch import nn from torch.nn.parameter import Parameter from einops", "feature map and returns the unnormalized covariance matrix \"\"\" m_batchsize,", "torch.bmm(proj_query, proj_key) return energy def forward(self, x): m_batchsize, C, height,", "self.channel)], dim=0) delta = torch.stack(m_batchsize * [delta], dim=0) offsets =", "def forward(self, x): m_batchsize, C, height, width = x.size() cov_matrix", "x.size() cov_matrix = self.covariance_features(x).reshape(m_batchsize, C, 1, C) _, locations =", "a Offset Generation module. 
\"\"\" def __init__(self, channel, n_offsets): super(dca_offsets_layer,", "n_offsets def covariance_features(self, x): \"\"\" Takes in a feature map", "= x.view(m_batchsize, C, -1).permute(0, 2, 1) energy = torch.bmm(proj_query, proj_key)", "self.n_offsets = n_offsets def covariance_features(self, x): \"\"\" Takes in a", "= torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0) delta = torch.stack(m_batchsize * [delta], dim=0)", "channel self.n_offsets = n_offsets def covariance_features(self, x): \"\"\" Takes in", "from einops import rearrange, reduce, repeat class dca_offsets_layer(nn.Module): \"\"\"Constructs a", "torch.stack(m_batchsize * [delta], dim=0) offsets = locations.squeeze() - delta.cuda() return", "torch.topk(cov_matrix, self.n_offsets, dim=1) delta = torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0) delta =", "Offset Generation module. \"\"\" def __init__(self, channel, n_offsets): super(dca_offsets_layer, self).__init__()", "1) energy = torch.bmm(proj_query, proj_key) return energy def forward(self, x):", "rearrange, reduce, repeat class dca_offsets_layer(nn.Module): \"\"\"Constructs a Offset Generation module.", "-1).permute(0, 2, 1) energy = torch.bmm(proj_query, proj_key) return energy def", "= x.view(m_batchsize, C, -1) proj_key = x.view(m_batchsize, C, -1).permute(0, 2,", "n_offsets): super(dca_offsets_layer, self).__init__() self.channel = channel self.n_offsets = n_offsets def", "torch.nn.parameter import Parameter from einops import rearrange, reduce, repeat class", "import nn from torch.nn.parameter import Parameter from einops import rearrange,", "torch from torch import nn from torch.nn.parameter import Parameter from", "keepdim=True) / (x.std(dim=1, keepdim=True) + 1e-5) proj_query = x.view(m_batchsize, C,", "einops import rearrange, reduce, repeat class dca_offsets_layer(nn.Module): \"\"\"Constructs a Offset", "module. 
\"\"\" def __init__(self, channel, n_offsets): super(dca_offsets_layer, self).__init__() self.channel =", "torch import nn from torch.nn.parameter import Parameter from einops import", "and returns the unnormalized covariance matrix \"\"\" m_batchsize, C, height,", "nn from torch.nn.parameter import Parameter from einops import rearrange, reduce,", "C, height, width = x.size() cov_matrix = self.covariance_features(x).reshape(m_batchsize, C, 1,", "= x.size() cov_matrix = self.covariance_features(x).reshape(m_batchsize, C, 1, C) _, locations", "Takes in a feature map and returns the unnormalized covariance", "1e-5) proj_query = x.view(m_batchsize, C, -1) proj_key = x.view(m_batchsize, C,", "= torch.topk(cov_matrix, self.n_offsets, dim=1) delta = torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0) delta", "\"\"\" def __init__(self, channel, n_offsets): super(dca_offsets_layer, self).__init__() self.channel = channel", "x): m_batchsize, C, height, width = x.size() cov_matrix = self.covariance_features(x).reshape(m_batchsize,", "map and returns the unnormalized covariance matrix \"\"\" m_batchsize, C,", "delta = torch.stack(m_batchsize * [delta], dim=0) offsets = locations.squeeze() -", "= torch.stack(m_batchsize * [delta], dim=0) offsets = locations.squeeze() - delta.cuda()", "C) _, locations = torch.topk(cov_matrix, self.n_offsets, dim=1) delta = torch.stack(self.n_offsets*[torch.arange(0,", "1, C) _, locations = torch.topk(cov_matrix, self.n_offsets, dim=1) delta =", "= n_offsets def covariance_features(self, x): \"\"\" Takes in a feature" ]
[ ". import test_image_opener from . import test_image_metrick from . import", "import test_image_opener from . import test_image_metrick from . import test_compare_tools", ". import test_image_metrick from . import test_compare_tools from . import", "from . import test_helpers from . import test_image_opener from .", "test_image_opener from . import test_image_metrick from . import test_compare_tools from", ". import test_helpers from . import test_image_opener from . import", "from . import test_image_opener from . import test_image_metrick from .", "test_helpers from . import test_image_opener from . import test_image_metrick from", "import test_image_metrick from . import test_compare_tools from . import test_compare_api", "import test_helpers from . import test_image_opener from . import test_image_metrick", "from . import test_image_metrick from . import test_compare_tools from ." ]
[ "admin from django.urls import path from .views import index, email,", "path from .views import index, email, post_detail, posts, hot_takes, take_detail", "hot_takes, take_detail from . import views app_name = \"core\" urlpatterns", "import views app_name = \"core\" urlpatterns = [ path('',views.index,name=\"index\"), path('email/',views.email,name=\"email\"),", "views app_name = \"core\" urlpatterns = [ path('',views.index,name=\"index\"), path('email/',views.email,name=\"email\"), path('post/<slug>/',views.post_detail,name='post'),", "import path from .views import index, email, post_detail, posts, hot_takes,", ".views import index, email, post_detail, posts, hot_takes, take_detail from .", "urlpatterns = [ path('',views.index,name=\"index\"), path('email/',views.email,name=\"email\"), path('post/<slug>/',views.post_detail,name='post'), path('posts/',views.posts,name='posts'), path('takes/',views.hot_takes,name='takes'), path('take/<slug>/',views.take_detail,name='take'), ]", "import index, email, post_detail, posts, hot_takes, take_detail from . import", "django.contrib import admin from django.urls import path from .views import", "email, post_detail, posts, hot_takes, take_detail from . import views app_name", "from django.contrib import admin from django.urls import path from .views", "django.urls import path from .views import index, email, post_detail, posts,", "posts, hot_takes, take_detail from . import views app_name = \"core\"", "from django.urls import path from .views import index, email, post_detail,", "= \"core\" urlpatterns = [ path('',views.index,name=\"index\"), path('email/',views.email,name=\"email\"), path('post/<slug>/',views.post_detail,name='post'), path('posts/',views.posts,name='posts'), path('takes/',views.hot_takes,name='takes'),", "from . 
import views app_name = \"core\" urlpatterns = [", "\"core\" urlpatterns = [ path('',views.index,name=\"index\"), path('email/',views.email,name=\"email\"), path('post/<slug>/',views.post_detail,name='post'), path('posts/',views.posts,name='posts'), path('takes/',views.hot_takes,name='takes'), path('take/<slug>/',views.take_detail,name='take'),", "import admin from django.urls import path from .views import index,", "take_detail from . import views app_name = \"core\" urlpatterns =", "<filename>core/urls.py from django.contrib import admin from django.urls import path from", "app_name = \"core\" urlpatterns = [ path('',views.index,name=\"index\"), path('email/',views.email,name=\"email\"), path('post/<slug>/',views.post_detail,name='post'), path('posts/',views.posts,name='posts'),", ". import views app_name = \"core\" urlpatterns = [ path('',views.index,name=\"index\"),", "post_detail, posts, hot_takes, take_detail from . import views app_name =", "index, email, post_detail, posts, hot_takes, take_detail from . import views", "from .views import index, email, post_detail, posts, hot_takes, take_detail from" ]
[]
[ "getenv(\"CASS_DB\") cass_password = getenv(\"CASS_PASSWORD\") cass_user = getenv(\"CASS_USER\") def create_db_con(): auth_provider", "model.storage import * from model.project import * from model.network import", "PlainTextAuthProvider from cassandra.cqlengine import connection from cassandra.cqlengine.management import sync_table from", "* from model.disk import * from model.storage import * from", "from model.network import * from model.user import * load_dotenv() cass_db", "= PlainTextAuthProvider(username=cass_user, password=<PASSWORD>) cluster = Cluster([cass_db],auth_provider=auth_provider) session = cluster.connect() session.execute(\"\"\"", "password=<PASSWORD>) cluster = Cluster([cass_db],auth_provider=auth_provider) session = cluster.connect() session.execute(\"\"\" CREATE KEYSPACE", "from cassandra.cqlengine import connection from cassandra.cqlengine.management import sync_table from cassandra.query", "INDEX IF NOT EXISTS ON blue_print (network);\") session.execute(\"CREATE INDEX IF", "CREATE KEYSPACE IF NOT EXISTS migration WITH replication = {", "connection from cassandra.cqlengine.management import sync_table from cassandra.query import ordered_dict_factory from", "cassandra.cqlengine.management import sync_table from cassandra.query import ordered_dict_factory from model.discover import", "import * from model.project import * from model.network import *", "from model.project import * from model.network import * from model.user", "mongoengine import * from dotenv import load_dotenv from os import", "getenv(\"CASS_PASSWORD\") cass_user = getenv(\"CASS_USER\") def create_db_con(): auth_provider = PlainTextAuthProvider(username=cass_user, password=<PASSWORD>)", "load_dotenv() cass_db = getenv(\"CASS_DB\") cass_password = getenv(\"CASS_PASSWORD\") cass_user = getenv(\"CASS_USER\")", "import * from model.user import * load_dotenv() cass_db = getenv(\"CASS_DB\")", "sync_table(User) sync_table(Disk) session.execute(\"CREATE INDEX IF NOT EXISTS ON 
blue_print (network);\")", "session.row_factory = ordered_dict_factory connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint) sync_table(Discover) sync_table(Project) sync_table(Network) sync_table(Subnet)", "'class': 'SimpleStrategy', 'replication_factor': '2' } \"\"\") session.set_keyspace('migration') session.row_factory = ordered_dict_factory", "from model.storage import * from model.project import * from model.network", "from model.disk import * from model.storage import * from model.project", "sync_table(Network) sync_table(Subnet) sync_table(Storage) sync_table(Bucket) sync_table(GcpBucket) sync_table(User) sync_table(Disk) session.execute(\"CREATE INDEX IF", "import ordered_dict_factory from model.discover import * from model.blueprint import *", "NOT EXISTS migration WITH replication = { 'class': 'SimpleStrategy', 'replication_factor':", "IF NOT EXISTS migration WITH replication = { 'class': 'SimpleStrategy',", "model.user import * load_dotenv() cass_db = getenv(\"CASS_DB\") cass_password = getenv(\"CASS_PASSWORD\")", "* from model.user import * load_dotenv() cass_db = getenv(\"CASS_DB\") cass_password", "session.execute(\"CREATE INDEX IF NOT EXISTS ON blue_print (subnet);\") return session", "* from model.storage import * from model.project import * from", "def create_db_con(): auth_provider = PlainTextAuthProvider(username=cass_user, password=<PASSWORD>) cluster = Cluster([cass_db],auth_provider=auth_provider) session", "IF NOT EXISTS ON blue_print (network);\") session.execute(\"CREATE INDEX IF NOT", "* from model.network import * from model.user import * load_dotenv()", "from model.blueprint import * from model.disk import * from model.storage", "cass_password = getenv(\"CASS_PASSWORD\") cass_user = getenv(\"CASS_USER\") def create_db_con(): auth_provider =", "import * from model.storage import * from model.project import *", "import connection from cassandra.cqlengine.management import 
sync_table from cassandra.query import ordered_dict_factory", "import * from model.network import * from model.user import *", "sync_table(Bucket) sync_table(GcpBucket) sync_table(User) sync_table(Disk) session.execute(\"CREATE INDEX IF NOT EXISTS ON", "ordered_dict_factory connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint) sync_table(Discover) sync_table(Project) sync_table(Network) sync_table(Subnet) sync_table(Storage) sync_table(Bucket)", "= ordered_dict_factory connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint) sync_table(Discover) sync_table(Project) sync_table(Network) sync_table(Subnet) sync_table(Storage)", "cassandra.auth import PlainTextAuthProvider from cassandra.cqlengine import connection from cassandra.cqlengine.management import", "getenv from cassandra.cluster import Cluster from cassandra.auth import PlainTextAuthProvider from", "Cluster([cass_db],auth_provider=auth_provider) session = cluster.connect() session.execute(\"\"\" CREATE KEYSPACE IF NOT EXISTS", "from cassandra.cqlengine.management import sync_table from cassandra.query import ordered_dict_factory from model.discover", "from cassandra.cluster import Cluster from cassandra.auth import PlainTextAuthProvider from cassandra.cqlengine", "'2' } \"\"\") session.set_keyspace('migration') session.row_factory = ordered_dict_factory connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint)", "from model.user import * load_dotenv() cass_db = getenv(\"CASS_DB\") cass_password =", "= cluster.connect() session.execute(\"\"\" CREATE KEYSPACE IF NOT EXISTS migration WITH", "model.disk import * from model.storage import * from model.project import", "EXISTS migration WITH replication = { 'class': 'SimpleStrategy', 'replication_factor': '2'", "from os import getenv from cassandra.cluster import Cluster from cassandra.auth", 
"cass_user = getenv(\"CASS_USER\") def create_db_con(): auth_provider = PlainTextAuthProvider(username=cass_user, password=<PASSWORD>) cluster", "dotenv import load_dotenv from os import getenv from cassandra.cluster import", "getenv(\"CASS_USER\") def create_db_con(): auth_provider = PlainTextAuthProvider(username=cass_user, password=<PASSWORD>) cluster = Cluster([cass_db],auth_provider=auth_provider)", "ordered_dict_factory from model.discover import * from model.blueprint import * from", "from model.discover import * from model.blueprint import * from model.disk", "PlainTextAuthProvider(username=cass_user, password=<PASSWORD>) cluster = Cluster([cass_db],auth_provider=auth_provider) session = cluster.connect() session.execute(\"\"\" CREATE", "cluster.connect() session.execute(\"\"\" CREATE KEYSPACE IF NOT EXISTS migration WITH replication", "cassandra.query import ordered_dict_factory from model.discover import * from model.blueprint import", "sync_table(Disk) session.execute(\"CREATE INDEX IF NOT EXISTS ON blue_print (network);\") session.execute(\"CREATE", "cassandra.cqlengine import connection from cassandra.cqlengine.management import sync_table from cassandra.query import", "session.execute(\"\"\" CREATE KEYSPACE IF NOT EXISTS migration WITH replication =", "} \"\"\") session.set_keyspace('migration') session.row_factory = ordered_dict_factory connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint) sync_table(Discover)", "= Cluster([cass_db],auth_provider=auth_provider) session = cluster.connect() session.execute(\"\"\" CREATE KEYSPACE IF NOT", "session = cluster.connect() session.execute(\"\"\" CREATE KEYSPACE IF NOT EXISTS migration", "\"\"\") session.set_keyspace('migration') session.row_factory = ordered_dict_factory connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint) sync_table(Discover) sync_table(Project)", "sync_table(BluePrint) 
sync_table(Discover) sync_table(Project) sync_table(Network) sync_table(Subnet) sync_table(Storage) sync_table(Bucket) sync_table(GcpBucket) sync_table(User) sync_table(Disk)", "sync_table from cassandra.query import ordered_dict_factory from model.discover import * from", "sync_table(GcpBucket) sync_table(User) sync_table(Disk) session.execute(\"CREATE INDEX IF NOT EXISTS ON blue_print", "WITH replication = { 'class': 'SimpleStrategy', 'replication_factor': '2' } \"\"\")", "from cassandra.query import ordered_dict_factory from model.discover import * from model.blueprint", "'SimpleStrategy', 'replication_factor': '2' } \"\"\") session.set_keyspace('migration') session.row_factory = ordered_dict_factory connection.setup([cass_db],", "model.blueprint import * from model.disk import * from model.storage import", "* load_dotenv() cass_db = getenv(\"CASS_DB\") cass_password = getenv(\"CASS_PASSWORD\") cass_user =", "{ 'class': 'SimpleStrategy', 'replication_factor': '2' } \"\"\") session.set_keyspace('migration') session.row_factory =", "session.set_keyspace('migration') session.row_factory = ordered_dict_factory connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint) sync_table(Discover) sync_table(Project) sync_table(Network)", "migration WITH replication = { 'class': 'SimpleStrategy', 'replication_factor': '2' }", "model.network import * from model.user import * load_dotenv() cass_db =", "NOT EXISTS ON blue_print (network);\") session.execute(\"CREATE INDEX IF NOT EXISTS", "cassandra.cluster import Cluster from cassandra.auth import PlainTextAuthProvider from cassandra.cqlengine import", "* from model.project import * from model.network import * from", "import getenv from cassandra.cluster import Cluster from cassandra.auth import PlainTextAuthProvider", "(network);\") session.execute(\"CREATE INDEX IF NOT EXISTS ON blue_print (subnet);\") return", "* from dotenv import load_dotenv from os import getenv from", 
"'replication_factor': '2' } \"\"\") session.set_keyspace('migration') session.row_factory = ordered_dict_factory connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider)", "cass_db = getenv(\"CASS_DB\") cass_password = getenv(\"CASS_PASSWORD\") cass_user = getenv(\"CASS_USER\") def", "replication = { 'class': 'SimpleStrategy', 'replication_factor': '2' } \"\"\") session.set_keyspace('migration')", "connection.setup([cass_db], \"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint) sync_table(Discover) sync_table(Project) sync_table(Network) sync_table(Subnet) sync_table(Storage) sync_table(Bucket) sync_table(GcpBucket)", "from dotenv import load_dotenv from os import getenv from cassandra.cluster", "auth_provider = PlainTextAuthProvider(username=cass_user, password=<PASSWORD>) cluster = Cluster([cass_db],auth_provider=auth_provider) session = cluster.connect()", "KEYSPACE IF NOT EXISTS migration WITH replication = { 'class':", "create_db_con(): auth_provider = PlainTextAuthProvider(username=cass_user, password=<PASSWORD>) cluster = Cluster([cass_db],auth_provider=auth_provider) session =", "from cassandra.auth import PlainTextAuthProvider from cassandra.cqlengine import connection from cassandra.cqlengine.management", "Cluster from cassandra.auth import PlainTextAuthProvider from cassandra.cqlengine import connection from", "model.project import * from model.network import * from model.user import", "= { 'class': 'SimpleStrategy', 'replication_factor': '2' } \"\"\") session.set_keyspace('migration') session.row_factory", "import PlainTextAuthProvider from cassandra.cqlengine import connection from cassandra.cqlengine.management import sync_table", "cluster = Cluster([cass_db],auth_provider=auth_provider) session = cluster.connect() session.execute(\"\"\" CREATE KEYSPACE IF", "import Cluster from cassandra.auth import PlainTextAuthProvider from cassandra.cqlengine import connection", "sync_table(Storage) 
sync_table(Bucket) sync_table(GcpBucket) sync_table(User) sync_table(Disk) session.execute(\"CREATE INDEX IF NOT EXISTS", "os import getenv from cassandra.cluster import Cluster from cassandra.auth import", "* from model.blueprint import * from model.disk import * from", "\"migration\",protocol_version=3,auth_provider=auth_provider) sync_table(BluePrint) sync_table(Discover) sync_table(Project) sync_table(Network) sync_table(Subnet) sync_table(Storage) sync_table(Bucket) sync_table(GcpBucket) sync_table(User)", "import * load_dotenv() cass_db = getenv(\"CASS_DB\") cass_password = getenv(\"CASS_PASSWORD\") cass_user", "load_dotenv from os import getenv from cassandra.cluster import Cluster from", "sync_table(Project) sync_table(Network) sync_table(Subnet) sync_table(Storage) sync_table(Bucket) sync_table(GcpBucket) sync_table(User) sync_table(Disk) session.execute(\"CREATE INDEX", "EXISTS ON blue_print (network);\") session.execute(\"CREATE INDEX IF NOT EXISTS ON", "import * from model.blueprint import * from model.disk import *", "sync_table(Subnet) sync_table(Storage) sync_table(Bucket) sync_table(GcpBucket) sync_table(User) sync_table(Disk) session.execute(\"CREATE INDEX IF NOT", "= getenv(\"CASS_DB\") cass_password = getenv(\"CASS_PASSWORD\") cass_user = getenv(\"CASS_USER\") def create_db_con():", "import * from dotenv import load_dotenv from os import getenv", "blue_print (network);\") session.execute(\"CREATE INDEX IF NOT EXISTS ON blue_print (subnet);\")", "from mongoengine import * from dotenv import load_dotenv from os", "import load_dotenv from os import getenv from cassandra.cluster import Cluster", "sync_table(Discover) sync_table(Project) sync_table(Network) sync_table(Subnet) sync_table(Storage) sync_table(Bucket) sync_table(GcpBucket) sync_table(User) sync_table(Disk) session.execute(\"CREATE", "model.discover import * from model.blueprint import * from model.disk import", "import sync_table from cassandra.query import ordered_dict_factory from 
model.discover import *", "= getenv(\"CASS_PASSWORD\") cass_user = getenv(\"CASS_USER\") def create_db_con(): auth_provider = PlainTextAuthProvider(username=cass_user,", "session.execute(\"CREATE INDEX IF NOT EXISTS ON blue_print (network);\") session.execute(\"CREATE INDEX", "ON blue_print (network);\") session.execute(\"CREATE INDEX IF NOT EXISTS ON blue_print", "import * from model.disk import * from model.storage import *", "= getenv(\"CASS_USER\") def create_db_con(): auth_provider = PlainTextAuthProvider(username=cass_user, password=<PASSWORD>) cluster =" ]
[ "= torch_reqs.get(\"min_version\", None) if min_version is not None: if TORCH_VERSION", "critical from ..logger import traceback_and_raise from .misc import create_union_ast class", "attr=torchvision_ast.attrs[\"torchvision\"]) # let the misc creation be always the last,", "attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"]) # let the misc creation be always", "\"lib\") misc_root = getattr(getattr(lib_ast, \"syft\"), \"lib\") misc_root.add_attr(attr_name=\"misc\", attr=union_misc_ast.attrs[\"misc\"]) return lib_ast", "None) if min_version is not None: if TORCH_VERSION < version.parse(min_version):", "= create_numpy_ast() lib_ast = Globals(client=client) lib_ast.add_attr(attr_name=\"syft\", attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\",", "is not None and vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT ): update_ast = getattr(vendor_ast,", "..lib.torch import create_torch_ast from ..lib.torchvision import create_torchvision_ast from ..logger import", "True def load_lib(lib: str, options: TypeDict[str, TypeAny] = {}) ->", "< {min_version}\" ) ) return True def load_lib(lib: str, options:", "traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" + f\"Python: {PYTHON_VERSION} <", "update_ast(ast_or_client=client) # cache the constructor for future created clients lib_ast.loaded_lib_constructors[lib]", "lib_ast = Globals(client=client) lib_ast.add_attr(attr_name=\"syft\", attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"]) #", "PACKAGE_SUPPORT = getattr(vendor_ast, \"PACKAGE_SUPPORT\", None) PACKAGE_SUPPORT.update(options) if PACKAGE_SUPPORT is not", "the relevant frameworks onto the node def 
create_lib_ast(client: Optional[Any] =", "not None: global lib_ast update_ast(ast_or_client=lib_ast) for _, client in lib_ast.registered_clients.items():", "Any from typing import Any as TypeAny from typing import", "TypeAny] = {}) -> None: try: _ = importlib.import_module(lib) vendor_ast", "frameworks onto the node def create_lib_ast(client: Optional[Any] = None) ->", "-> Globals: python_ast = create_python_ast(client=client) torch_ast = create_torch_ast(client=client) torchvision_ast =", "TORCH_VERSION < version.parse(min_version): traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" +", "Globals(client=client) lib_ast.add_attr(attr_name=\"syft\", attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"]) # let the", "future created clients lib_ast.loaded_lib_constructors[lib] = update_ast except VendorLibraryImportException as e:", "for _, client in lib_ast.registered_clients.items(): update_ast(ast_or_client=client) # cache the constructor", "create_torch_ast(client=client) torchvision_ast = create_torchvision_ast(client=client) # numpy_ast = create_numpy_ast() lib_ast =", "supported if \"python\" in vendor_requirements: python_reqs = vendor_requirements[\"python\"] PYTHON_VERSION =", "sys from typing import Any from typing import Any as", "import critical from ..logger import traceback_and_raise from .misc import create_union_ast", "+ f\"Torch: {TORCH_VERSION} < {min_version}\" ) ) return True def", ") return True def load_lib(lib: str, options: TypeDict[str, TypeAny] =", "load {vendor_requirements['lib']}.\" + f\"Torch: {TORCH_VERSION} < {min_version}\" ) ) return", "\"torch\" in vendor_requirements: torch_reqs = vendor_requirements[\"torch\"] # third party import", "None) if update_ast is not None: global lib_ast update_ast(ast_or_client=lib_ast) for", "TypeDict[str, TypeAny]) -> 
bool: # see if python version is", "is not None: if TORCH_VERSION < version.parse(min_version): traceback_and_raise( VendorLibraryImportException( f\"Unable", "to load package support for: {lib}. {e}\") # now we", "getattr(vendor_ast, \"PACKAGE_SUPPORT\", None) PACKAGE_SUPPORT.update(options) if PACKAGE_SUPPORT is not None and", "update_ast except VendorLibraryImportException as e: critical(e) except Exception as e:", ") # see if torch version is supported if \"torch\"", "f\"Unable to load {vendor_requirements['lib']}.\" + f\"Python: {PYTHON_VERSION} < {min_version}\" )", "is not None: global lib_ast update_ast(ast_or_client=lib_ast) for _, client in", "getattr(vendor_ast, \"update_ast\", None) if update_ast is not None: global lib_ast", "# see if torch version is supported if \"torch\" in", "f\"Torch: {TORCH_VERSION} < {min_version}\" ) ) return True def load_lib(lib:", "vendor_ast = importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT = getattr(vendor_ast, \"PACKAGE_SUPPORT\", None) PACKAGE_SUPPORT.update(options) if", "the last, as it needs the full ast solved #", "importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT = getattr(vendor_ast, \"PACKAGE_SUPPORT\", None) PACKAGE_SUPPORT.update(options) if PACKAGE_SUPPORT is", "if TORCH_VERSION < version.parse(min_version): traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\"", "\"syft\"), \"lib\") misc_root = getattr(getattr(lib_ast, \"syft\"), \"lib\") misc_root.add_attr(attr_name=\"misc\", attr=union_misc_ast.attrs[\"misc\"]) return", "Dict as TypeDict from typing import Optional # third party", "traceback_and_raise from .misc import create_union_ast class VendorLibraryImportException(Exception): pass def vendor_requirements_available(vendor_requirements:", "as it needs the full ast solved # to properly", "to properly generated unions union_misc_ast = getattr(getattr(create_union_ast(lib_ast, client), \"syft\"), \"lib\")", "if update_ast is not None: 
global lib_ast update_ast(ast_or_client=lib_ast) for _,", "if \"python\" in vendor_requirements: python_reqs = vendor_requirements[\"python\"] PYTHON_VERSION = sys.version_info", "vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT ): update_ast = getattr(vendor_ast, \"update_ast\", None) if update_ast", "Exception as e: critical(f\"Unable to load package support for: {lib}.", "the constructor for future created clients lib_ast.loaded_lib_constructors[lib] = update_ast except", "from typing import Any from typing import Any as TypeAny", "typing import Any as TypeAny from typing import Dict as", "relative from ..ast.globals import Globals from ..lib.python import create_python_ast from", "getattr(getattr(create_union_ast(lib_ast, client), \"syft\"), \"lib\") misc_root = getattr(getattr(lib_ast, \"syft\"), \"lib\") misc_root.add_attr(attr_name=\"misc\",", "full ast solved # to properly generated unions union_misc_ast =", "= vendor_requirements[\"torch\"] # third party import torch TORCH_VERSION = version.parse(torch.__version__.split(\"+\")[0])", "Globals from ..lib.python import create_python_ast from ..lib.torch import create_torch_ast from", "from typing import Dict as TypeDict from typing import Optional", "lib_ast.registered_clients.items(): update_ast(ast_or_client=client) # cache the constructor for future created clients", "numpy_ast = create_numpy_ast() lib_ast = Globals(client=client) lib_ast.add_attr(attr_name=\"syft\", attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"])", "< version.parse(min_version): traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" + f\"Torch:", "python_reqs.get(\"min_version\", None) if min_version is not None: if PYTHON_VERSION <", "critical(f\"Unable to load package support for: {lib}. 
{e}\") # now", "Globals: python_ast = create_python_ast(client=client) torch_ast = create_torch_ast(client=client) torchvision_ast = create_torchvision_ast(client=client)", "create_lib_ast(client: Optional[Any] = None) -> Globals: python_ast = create_python_ast(client=client) torch_ast", "created clients lib_ast.loaded_lib_constructors[lib] = update_ast except VendorLibraryImportException as e: critical(e)", "importlib.import_module(lib) vendor_ast = importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT = getattr(vendor_ast, \"PACKAGE_SUPPORT\", None) PACKAGE_SUPPORT.update(options)", "for: {lib}. {e}\") # now we need to load the", "None: if TORCH_VERSION < version.parse(min_version): traceback_and_raise( VendorLibraryImportException( f\"Unable to load", "to load {vendor_requirements['lib']}.\" + f\"Torch: {TORCH_VERSION} < {min_version}\" ) )", "TypeDict from typing import Optional # third party from packaging", "torch_reqs.get(\"min_version\", None) if min_version is not None: if TORCH_VERSION <", "def create_lib_ast(client: Optional[Any] = None) -> Globals: python_ast = create_python_ast(client=client)", "PACKAGE_SUPPORT is not None and vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT ): update_ast =", "import create_torchvision_ast from ..logger import critical from ..logger import traceback_and_raise", "not None and vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT ): update_ast = getattr(vendor_ast, \"update_ast\",", "None: global lib_ast update_ast(ast_or_client=lib_ast) for _, client in lib_ast.registered_clients.items(): update_ast(ast_or_client=client)", "import version # syft relative from ..ast.globals import Globals from", "VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" + f\"Torch: {TORCH_VERSION} < {min_version}\"", "be always the last, as it needs the full ast", "create_torchvision_ast(client=client) # numpy_ast = create_numpy_ast() lib_ast = Globals(client=client) 
lib_ast.add_attr(attr_name=\"syft\", attr=python_ast.attrs[\"syft\"])", "the misc creation be always the last, as it needs", "solved # to properly generated unions union_misc_ast = getattr(getattr(create_union_ast(lib_ast, client),", "see if python version is supported if \"python\" in vendor_requirements:", "as e: critical(f\"Unable to load package support for: {lib}. {e}\")", "as TypeDict from typing import Optional # third party from", "creation be always the last, as it needs the full", "e: critical(e) except Exception as e: critical(f\"Unable to load package", "except Exception as e: critical(f\"Unable to load package support for:", "from ..lib.torch import create_torch_ast from ..lib.torchvision import create_torchvision_ast from ..logger", "# to properly generated unions union_misc_ast = getattr(getattr(create_union_ast(lib_ast, client), \"syft\"),", "<reponame>godormad/PySyft # stdlib import importlib import sys from typing import", "constructor for future created clients lib_ast.loaded_lib_constructors[lib] = update_ast except VendorLibraryImportException", "= version.parse(torch.__version__.split(\"+\")[0]) min_version = torch_reqs.get(\"min_version\", None) if min_version is not", "from ..logger import critical from ..logger import traceback_and_raise from .misc", "version is supported if \"python\" in vendor_requirements: python_reqs = vendor_requirements[\"python\"]", "generated unions union_misc_ast = getattr(getattr(create_union_ast(lib_ast, client), \"syft\"), \"lib\") misc_root =", "traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" + f\"Torch: {TORCH_VERSION} <", "load package support for: {lib}. 
{e}\") # now we need", "attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"]) # let the misc creation", "# numpy_ast = create_numpy_ast() lib_ast = Globals(client=client) lib_ast.add_attr(attr_name=\"syft\", attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\",", "= create_torch_ast(client=client) torchvision_ast = create_torchvision_ast(client=client) # numpy_ast = create_numpy_ast() lib_ast", "{}) -> None: try: _ = importlib.import_module(lib) vendor_ast = importlib.import_module(f\"syft.lib.{lib}\")", "to load {vendor_requirements['lib']}.\" + f\"Python: {PYTHON_VERSION} < {min_version}\" ) )", "create_torch_ast from ..lib.torchvision import create_torchvision_ast from ..logger import critical from", "in vendor_requirements: torch_reqs = vendor_requirements[\"torch\"] # third party import torch", "version # syft relative from ..ast.globals import Globals from ..lib.python", "import sys from typing import Any from typing import Any", "= create_python_ast(client=client) torch_ast = create_torch_ast(client=client) torchvision_ast = create_torchvision_ast(client=client) # numpy_ast", "node def create_lib_ast(client: Optional[Any] = None) -> Globals: python_ast =", "party import torch TORCH_VERSION = version.parse(torch.__version__.split(\"+\")[0]) min_version = torch_reqs.get(\"min_version\", None)", "try: _ = importlib.import_module(lib) vendor_ast = importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT = getattr(vendor_ast,", "misc creation be always the last, as it needs the", "package support for: {lib}. 
{e}\") # now we need to", "except VendorLibraryImportException as e: critical(e) except Exception as e: critical(f\"Unable", ") ) # see if torch version is supported if", "unions union_misc_ast = getattr(getattr(create_union_ast(lib_ast, client), \"syft\"), \"lib\") misc_root = getattr(getattr(lib_ast,", "create_python_ast from ..lib.torch import create_torch_ast from ..lib.torchvision import create_torchvision_ast from", "None) -> Globals: python_ast = create_python_ast(client=client) torch_ast = create_torch_ast(client=client) torchvision_ast", "not None: if PYTHON_VERSION < min_version: traceback_and_raise( VendorLibraryImportException( f\"Unable to", "): update_ast = getattr(vendor_ast, \"update_ast\", None) if update_ast is not", "typing import Dict as TypeDict from typing import Optional #", "< {min_version}\" ) ) # see if torch version is", "typing import Any from typing import Any as TypeAny from", "typing import Optional # third party from packaging import version", "min_version is not None: if PYTHON_VERSION < min_version: traceback_and_raise( VendorLibraryImportException(", "None: if PYTHON_VERSION < min_version: traceback_and_raise( VendorLibraryImportException( f\"Unable to load", "= None) -> Globals: python_ast = create_python_ast(client=client) torch_ast = create_torch_ast(client=client)", "import Any from typing import Any as TypeAny from typing", "version.parse(torch.__version__.split(\"+\")[0]) min_version = torch_reqs.get(\"min_version\", None) if min_version is not None:", "Optional[Any] = None) -> Globals: python_ast = create_python_ast(client=client) torch_ast =", "from ..ast.globals import Globals from ..lib.python import create_python_ast from ..lib.torch", "def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) -> bool: # see if python", "= {}) -> None: try: _ = importlib.import_module(lib) vendor_ast =", "..logger import critical from ..logger import traceback_and_raise from .misc import", "onto the node def 
create_lib_ast(client: Optional[Any] = None) -> Globals:", "client in lib_ast.registered_clients.items(): update_ast(ast_or_client=client) # cache the constructor for future", "create_python_ast(client=client) torch_ast = create_torch_ast(client=client) torchvision_ast = create_torchvision_ast(client=client) # numpy_ast =", "min_version is not None: if TORCH_VERSION < version.parse(min_version): traceback_and_raise( VendorLibraryImportException(", "= getattr(vendor_ast, \"PACKAGE_SUPPORT\", None) PACKAGE_SUPPORT.update(options) if PACKAGE_SUPPORT is not None", "update_ast(ast_or_client=lib_ast) for _, client in lib_ast.registered_clients.items(): update_ast(ast_or_client=client) # cache the", "vendor_requirements[\"python\"] PYTHON_VERSION = sys.version_info min_version = python_reqs.get(\"min_version\", None) if min_version", "import create_python_ast from ..lib.torch import create_torch_ast from ..lib.torchvision import create_torchvision_ast", "= Globals(client=client) lib_ast.add_attr(attr_name=\"syft\", attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"]) # let", "support for: {lib}. 
{e}\") # now we need to load", "VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" + f\"Python: {PYTHON_VERSION} < {min_version}\"", "union_misc_ast = getattr(getattr(create_union_ast(lib_ast, client), \"syft\"), \"lib\") misc_root = getattr(getattr(lib_ast, \"syft\"),", "update_ast is not None: global lib_ast update_ast(ast_or_client=lib_ast) for _, client", "we need to load the relevant frameworks onto the node", "the full ast solved # to properly generated unions union_misc_ast", "pass def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) -> bool: # see if", "is supported if \"torch\" in vendor_requirements: torch_reqs = vendor_requirements[\"torch\"] #", "bool: # see if python version is supported if \"python\"", "Any as TypeAny from typing import Dict as TypeDict from", "the node def create_lib_ast(client: Optional[Any] = None) -> Globals: python_ast", "None) if min_version is not None: if PYTHON_VERSION < min_version:", "TypeAny]) -> bool: # see if python version is supported", "TypeDict[str, TypeAny] = {}) -> None: try: _ = importlib.import_module(lib)", "lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"]) # let the misc creation be always the", "it needs the full ast solved # to properly generated", "..ast.globals import Globals from ..lib.python import create_python_ast from ..lib.torch import", "is supported if \"python\" in vendor_requirements: python_reqs = vendor_requirements[\"python\"] PYTHON_VERSION", "see if torch version is supported if \"torch\" in vendor_requirements:", "..logger import traceback_and_raise from .misc import create_union_ast class VendorLibraryImportException(Exception): pass", "f\"Python: {PYTHON_VERSION} < {min_version}\" ) ) # see if torch", "packaging import version # syft relative from ..ast.globals import Globals", "vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) -> bool: # see if python version", "= 
vendor_requirements[\"python\"] PYTHON_VERSION = sys.version_info min_version = python_reqs.get(\"min_version\", None) if", "= getattr(getattr(lib_ast, \"syft\"), \"lib\") misc_root.add_attr(attr_name=\"misc\", attr=union_misc_ast.attrs[\"misc\"]) return lib_ast lib_ast =", "..lib.torchvision import create_torchvision_ast from ..logger import critical from ..logger import", "lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"]) # let the misc creation be", "import Globals from ..lib.python import create_python_ast from ..lib.torch import create_torch_ast", "torch version is supported if \"torch\" in vendor_requirements: torch_reqs =", "TypeAny from typing import Dict as TypeDict from typing import", "-> bool: # see if python version is supported if", "f\"Unable to load {vendor_requirements['lib']}.\" + f\"Torch: {TORCH_VERSION} < {min_version}\" )", "{vendor_requirements['lib']}.\" + f\"Torch: {TORCH_VERSION} < {min_version}\" ) ) return True", "let the misc creation be always the last, as it", "min_version = python_reqs.get(\"min_version\", None) if min_version is not None: if", "< min_version: traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" + f\"Python:", "import create_torch_ast from ..lib.torchvision import create_torchvision_ast from ..logger import critical", "{PYTHON_VERSION} < {min_version}\" ) ) # see if torch version", "PACKAGE_SUPPORT.update(options) if PACKAGE_SUPPORT is not None and vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT ):", "update_ast = getattr(vendor_ast, \"update_ast\", None) if update_ast is not None:", "load the relevant frameworks onto the node def create_lib_ast(client: Optional[Any]", "third party from packaging import version # syft relative from", "python_ast = create_python_ast(client=client) torch_ast = create_torch_ast(client=client) torchvision_ast = 
create_torchvision_ast(client=client) #", "vendor_requirements=PACKAGE_SUPPORT ): update_ast = getattr(vendor_ast, \"update_ast\", None) if update_ast is", "load {vendor_requirements['lib']}.\" + f\"Python: {PYTHON_VERSION} < {min_version}\" ) ) #", "if python version is supported if \"python\" in vendor_requirements: python_reqs", "def load_lib(lib: str, options: TypeDict[str, TypeAny] = {}) -> None:", "# now we need to load the relevant frameworks onto", "VendorLibraryImportException(Exception): pass def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) -> bool: # see", "None) PACKAGE_SUPPORT.update(options) if PACKAGE_SUPPORT is not None and vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT", "party from packaging import version # syft relative from ..ast.globals", "{TORCH_VERSION} < {min_version}\" ) ) return True def load_lib(lib: str,", "torchvision_ast = create_torchvision_ast(client=client) # numpy_ast = create_numpy_ast() lib_ast = Globals(client=client)", "import Any as TypeAny from typing import Dict as TypeDict", "version is supported if \"torch\" in vendor_requirements: torch_reqs = vendor_requirements[\"torch\"]", "from packaging import version # syft relative from ..ast.globals import", "from ..lib.python import create_python_ast from ..lib.torch import create_torch_ast from ..lib.torchvision", "PYTHON_VERSION = sys.version_info min_version = python_reqs.get(\"min_version\", None) if min_version is", "\"PACKAGE_SUPPORT\", None) PACKAGE_SUPPORT.update(options) if PACKAGE_SUPPORT is not None and vendor_requirements_available(", "# cache the constructor for future created clients lib_ast.loaded_lib_constructors[lib] =", "= update_ast except VendorLibraryImportException as e: critical(e) except Exception as", "import torch TORCH_VERSION = version.parse(torch.__version__.split(\"+\")[0]) min_version = torch_reqs.get(\"min_version\", None) if", "None: try: _ = importlib.import_module(lib) vendor_ast = 
importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT =", "load_lib(lib: str, options: TypeDict[str, TypeAny] = {}) -> None: try:", "{min_version}\" ) ) # see if torch version is supported", "-> None: try: _ = importlib.import_module(lib) vendor_ast = importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT", "torch_reqs = vendor_requirements[\"torch\"] # third party import torch TORCH_VERSION =", "version.parse(min_version): traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" + f\"Torch: {TORCH_VERSION}", "= getattr(getattr(create_union_ast(lib_ast, client), \"syft\"), \"lib\") misc_root = getattr(getattr(lib_ast, \"syft\"), \"lib\")", "lib_ast.add_attr(attr_name=\"syft\", attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"]) # let the misc", "torch_ast = create_torch_ast(client=client) torchvision_ast = create_torchvision_ast(client=client) # numpy_ast = create_numpy_ast()", "e: critical(f\"Unable to load package support for: {lib}. 
{e}\") #", "# third party import torch TORCH_VERSION = version.parse(torch.__version__.split(\"+\")[0]) min_version =", "create_torchvision_ast from ..logger import critical from ..logger import traceback_and_raise from", "as e: critical(e) except Exception as e: critical(f\"Unable to load", "import importlib import sys from typing import Any from typing", "import Optional # third party from packaging import version #", "TORCH_VERSION = version.parse(torch.__version__.split(\"+\")[0]) min_version = torch_reqs.get(\"min_version\", None) if min_version is", "last, as it needs the full ast solved # to", "is not None: if PYTHON_VERSION < min_version: traceback_and_raise( VendorLibraryImportException( f\"Unable", "{e}\") # now we need to load the relevant frameworks", "from ..logger import traceback_and_raise from .misc import create_union_ast class VendorLibraryImportException(Exception):", ") ) return True def load_lib(lib: str, options: TypeDict[str, TypeAny]", "from typing import Any as TypeAny from typing import Dict", "= importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT = getattr(vendor_ast, \"PACKAGE_SUPPORT\", None) PACKAGE_SUPPORT.update(options) if PACKAGE_SUPPORT", "python version is supported if \"python\" in vendor_requirements: python_reqs =", "to load the relevant frameworks onto the node def create_lib_ast(client:", "# let the misc creation be always the last, as", "now we need to load the relevant frameworks onto the", "min_version = torch_reqs.get(\"min_version\", None) if min_version is not None: if", "import Dict as TypeDict from typing import Optional # third", "class VendorLibraryImportException(Exception): pass def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) -> bool: #", "importlib import sys from typing import Any from typing import", "min_version: traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" + f\"Python: {PYTHON_VERSION}", "\"python\" in 
vendor_requirements: python_reqs = vendor_requirements[\"python\"] PYTHON_VERSION = sys.version_info min_version", "for future created clients lib_ast.loaded_lib_constructors[lib] = update_ast except VendorLibraryImportException as", "client), \"syft\"), \"lib\") misc_root = getattr(getattr(lib_ast, \"syft\"), \"lib\") misc_root.add_attr(attr_name=\"misc\", attr=union_misc_ast.attrs[\"misc\"])", "not None: if TORCH_VERSION < version.parse(min_version): traceback_and_raise( VendorLibraryImportException( f\"Unable to", "if PYTHON_VERSION < min_version: traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\"", "if min_version is not None: if PYTHON_VERSION < min_version: traceback_and_raise(", "# see if python version is supported if \"python\" in", "relevant frameworks onto the node def create_lib_ast(client: Optional[Any] = None)", "= create_torchvision_ast(client=client) # numpy_ast = create_numpy_ast() lib_ast = Globals(client=client) lib_ast.add_attr(attr_name=\"syft\",", "clients lib_ast.loaded_lib_constructors[lib] = update_ast except VendorLibraryImportException as e: critical(e) except", "lib_ast update_ast(ast_or_client=lib_ast) for _, client in lib_ast.registered_clients.items(): update_ast(ast_or_client=client) # cache", "= python_reqs.get(\"min_version\", None) if min_version is not None: if PYTHON_VERSION", "= sys.version_info min_version = python_reqs.get(\"min_version\", None) if min_version is not", "str, options: TypeDict[str, TypeAny] = {}) -> None: try: _", "in vendor_requirements: python_reqs = vendor_requirements[\"python\"] PYTHON_VERSION = sys.version_info min_version =", "as TypeAny from typing import Dict as TypeDict from typing", "create_union_ast class VendorLibraryImportException(Exception): pass def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) -> bool:", "create_numpy_ast() lib_ast = Globals(client=client) lib_ast.add_attr(attr_name=\"syft\", 
attr=python_ast.attrs[\"syft\"]) lib_ast.add_attr(attr_name=\"torch\", attr=torch_ast.attrs[\"torch\"]) lib_ast.add_attr(attr_name=\"torchvision\", attr=torchvision_ast.attrs[\"torchvision\"])", "return True def load_lib(lib: str, options: TypeDict[str, TypeAny] = {})", "{vendor_requirements['lib']}.\" + f\"Python: {PYTHON_VERSION} < {min_version}\" ) ) # see", "if torch version is supported if \"torch\" in vendor_requirements: torch_reqs", "options: TypeDict[str, TypeAny] = {}) -> None: try: _ =", "_ = importlib.import_module(lib) vendor_ast = importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT = getattr(vendor_ast, \"PACKAGE_SUPPORT\",", "sys.version_info min_version = python_reqs.get(\"min_version\", None) if min_version is not None:", "ast solved # to properly generated unions union_misc_ast = getattr(getattr(create_union_ast(lib_ast,", "and vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT ): update_ast = getattr(vendor_ast, \"update_ast\", None) if", "# stdlib import importlib import sys from typing import Any", "properly generated unions union_misc_ast = getattr(getattr(create_union_ast(lib_ast, client), \"syft\"), \"lib\") misc_root", "misc_root = getattr(getattr(lib_ast, \"syft\"), \"lib\") misc_root.add_attr(attr_name=\"misc\", attr=union_misc_ast.attrs[\"misc\"]) return lib_ast lib_ast", "_, client in lib_ast.registered_clients.items(): update_ast(ast_or_client=client) # cache the constructor for", "# syft relative from ..ast.globals import Globals from ..lib.python import", "vendor_requirements: torch_reqs = vendor_requirements[\"torch\"] # third party import torch TORCH_VERSION", "if \"torch\" in vendor_requirements: torch_reqs = vendor_requirements[\"torch\"] # third party", "= importlib.import_module(lib) vendor_ast = importlib.import_module(f\"syft.lib.{lib}\") PACKAGE_SUPPORT = getattr(vendor_ast, \"PACKAGE_SUPPORT\", None)", "None and vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT ): update_ast = 
getattr(vendor_ast, \"update_ast\", None)", ".misc import create_union_ast class VendorLibraryImportException(Exception): pass def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny])", "stdlib import importlib import sys from typing import Any from", "VendorLibraryImportException as e: critical(e) except Exception as e: critical(f\"Unable to", "needs the full ast solved # to properly generated unions", "{min_version}\" ) ) return True def load_lib(lib: str, options: TypeDict[str,", "vendor_requirements[\"torch\"] # third party import torch TORCH_VERSION = version.parse(torch.__version__.split(\"+\")[0]) min_version", "..lib.python import create_python_ast from ..lib.torch import create_torch_ast from ..lib.torchvision import", "Optional # third party from packaging import version # syft", "# third party from packaging import version # syft relative", "third party import torch TORCH_VERSION = version.parse(torch.__version__.split(\"+\")[0]) min_version = torch_reqs.get(\"min_version\",", "lib_ast.loaded_lib_constructors[lib] = update_ast except VendorLibraryImportException as e: critical(e) except Exception", "from ..lib.torchvision import create_torchvision_ast from ..logger import critical from ..logger", "\"update_ast\", None) if update_ast is not None: global lib_ast update_ast(ast_or_client=lib_ast)", "critical(e) except Exception as e: critical(f\"Unable to load package support", "getattr(getattr(lib_ast, \"syft\"), \"lib\") misc_root.add_attr(attr_name=\"misc\", attr=union_misc_ast.attrs[\"misc\"]) return lib_ast lib_ast = create_lib_ast(None)", "need to load the relevant frameworks onto the node def", "python_reqs = vendor_requirements[\"python\"] PYTHON_VERSION = sys.version_info min_version = python_reqs.get(\"min_version\", None)", "+ f\"Python: {PYTHON_VERSION} < {min_version}\" ) ) # see if", "if min_version is not None: if TORCH_VERSION < version.parse(min_version): traceback_and_raise(", "= getattr(vendor_ast, \"update_ast\", 
None) if update_ast is not None: global", "PYTHON_VERSION < min_version: traceback_and_raise( VendorLibraryImportException( f\"Unable to load {vendor_requirements['lib']}.\" +", "from typing import Optional # third party from packaging import", "cache the constructor for future created clients lib_ast.loaded_lib_constructors[lib] = update_ast", "if PACKAGE_SUPPORT is not None and vendor_requirements_available( vendor_requirements=PACKAGE_SUPPORT ): update_ast", "syft relative from ..ast.globals import Globals from ..lib.python import create_python_ast", "{lib}. {e}\") # now we need to load the relevant", "in lib_ast.registered_clients.items(): update_ast(ast_or_client=client) # cache the constructor for future created", "import traceback_and_raise from .misc import create_union_ast class VendorLibraryImportException(Exception): pass def", "always the last, as it needs the full ast solved", "global lib_ast update_ast(ast_or_client=lib_ast) for _, client in lib_ast.registered_clients.items(): update_ast(ast_or_client=client) #", "torch TORCH_VERSION = version.parse(torch.__version__.split(\"+\")[0]) min_version = torch_reqs.get(\"min_version\", None) if min_version", "vendor_requirements: python_reqs = vendor_requirements[\"python\"] PYTHON_VERSION = sys.version_info min_version = python_reqs.get(\"min_version\",", "supported if \"torch\" in vendor_requirements: torch_reqs = vendor_requirements[\"torch\"] # third", "from .misc import create_union_ast class VendorLibraryImportException(Exception): pass def vendor_requirements_available(vendor_requirements: TypeDict[str,", "import create_union_ast class VendorLibraryImportException(Exception): pass def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) ->" ]
[ "map_q = args.map_q size_range = args.size_range CPU = args.CPU #", "parser.add_argument('--mapable_regions', help='highly mapable regions to be used in GC correction,", "CPU sublists = np.array_split(mapable_intervals,CPU) #split the list into sublists, one", "from multiprocessing import Pool # In[ ]: # ##arguments for", "= args.bam_file bam_file_name = args.bam_file_name mapable_path=args.mapable_regions ref_seq_path = args.ref_seq chrom_sizes_path", "'+str(size_range)) print('\\tCPU = '+str(CPU)) # In[ ]: mapable_name = mapable_path.rsplit('/',1)[1].rsplit('.',1)[0]", "= mapable_intervals[mapable_intervals[0].isin(chroms)] print('chroms:', chroms) print('number_of_intervals:',len(mapable_intervals)) sys.stdout.flush() # In[ ]: def", "i%5000==0: print('interval',i,':',chrom,start,end,'seconds:',np.round(time.time()-start_time)) sys.stdout.flush() #fetch any read that overlaps the inteterval", "= fragment_seq.astype(int) num_GC = int(fragment_seq.sum()) GC_dict[abs(read.template_length)][num_GC]+=1 print('done') return(GC_dict) # In[", "in fetched: #use both fw (positive template length) and rv", "into sublists, one per CPU GC_dict_list = p.map(collect_reads, sublists, 1)", "\"'+bam_file_name+'\"') print('\\tmapable_regions = \"'+mapable_path+'\"') print('\\tref_seq_path = \"'+ref_seq_path+'\"') print('\\tchrom_sizes_path = \"'+chrom_sizes_path+'\"')", "print('done') return(GC_dict) # In[ ]: start_time = time.time() p =", "sequence (fasta format)',required=True) parser.add_argument('--chrom_sizes',help='path to chromosome sizes for the reference", "all_GC_df.sum(axis=1) all_GC_df = pd.DataFrame(all_GC_df).rename(columns = {0:'number_of_fragments'}) all_GC_df = all_GC_df.reset_index() all_GC_df.to_csv(out_file,sep='\\t',index=False)", "overlaps the inteterval (don't need to extend the interval because", "# bam_file_name = 'MBC_1041_1_ULP' # mapable_path = '../../downloads/genome/repeat_masker.mapable.k50.Umap.hg38.bedGraph' # ref_seq_path", "= 
\"'+bam_file_name+'\"') print('\\tmapable_regions = \"'+mapable_path+'\"') print('\\tref_seq_path = \"'+ref_seq_path+'\"') print('\\tchrom_sizes_path =", "parser.parse_args() bam_file_path = args.bam_file bam_file_name = args.bam_file_name mapable_path=args.mapable_regions ref_seq_path =", "help='sample_bam_file', required=True) parser.add_argument('--bam_file_name', help='sample name (does not need to match", "# ################# # if read.is_reverse==False: # if fragment_seq[0:read.reference_length]==read.query_sequence and len(fragment_seq)==read.template_length:", "read.is_reverse==True: # if fragment_seq[-read.reference_length:]==read.query_sequence and len(fragment_seq)==-read.template_length: # print('rv match',read.reference_length) #", "required=True) parser.add_argument('--ref_seq',help='reference sequence (fasta format)',required=True) parser.add_argument('--chrom_sizes',help='path to chromosome sizes for", "integer) #should be very rare if the filter is done", "foramt', required=True) parser.add_argument('--ref_seq',help='reference sequence (fasta format)',required=True) parser.add_argument('--chrom_sizes',help='path to chromosome sizes", "length in GC_dict.keys(): current = pd.Series(GC_dict[length]).reset_index() current = current.rename(columns={'index':'num_GC',0:'number_of_fragments'}) current['length']=length", "pd.Series(GC_dict[length]).reset_index() current = current.rename(columns={'index':'num_GC',0:'number_of_fragments'}) current['length']=length current = current[['length','num_GC','number_of_fragments']] GC_df =", "the filter is done right fragment_seq = fragment_seq.astype(int) num_GC =", "= read_end + read.template_length fragment_seq = ref_seq.fetch(read.reference_name,read_start,read_end) #tally up the", "sublists, one per CPU GC_dict_list = p.map(collect_reads, sublists, 1) #", "+'/'+mapable_name+'/GC_counts/'): os.mkdir(out_dir +'/'+mapable_name+'/GC_counts/') # In[ ]: #import filter mapable_intervals =", 
"mapable_intervals[mapable_intervals[0].isin(chroms)] print('chroms:', chroms) print('number_of_intervals:',len(mapable_intervals)) sys.stdout.flush() # In[ ]: def collect_reads(sublist):", "random 0 or 1 for N (so that you always", "= args.map_q size_range = args.size_range CPU = args.CPU # In[", "template length) reads if (read.is_reverse==False and read.template_length>=size_range[0] and read.template_length<=size_range[1]) or", "and read.mapping_quality>=map_q and read.is_duplicate==False and read.is_qcfail==False: if read.is_reverse==False: read_start =", "# print('fw match',read.reference_length) # else: # print(fragment_seq[0:read.reference_length],read.reference_length,'fw') # print(read.query_sequence,len(read.query_sequence),'fw') #", "current = current[['length','num_GC','number_of_fragments']] GC_df = GC_df.append(current, ignore_index=True) GC_df = GC_df.set_index(['length','num_GC'])", "CPU = 4 # In[ ]: parser = argparse.ArgumentParser() parser.add_argument('--bam_file',", "in range(1,23)] mapable_intervals = mapable_intervals[mapable_intervals[0].isin(chroms)] print('chroms:', chroms) print('number_of_intervals:',len(mapable_intervals)) sys.stdout.flush() #", "correction, bedGraph or bed foramt', required=True) parser.add_argument('--ref_seq',help='reference sequence (fasta format)',required=True)", "read_start = read_end + read.template_length fragment_seq = ref_seq.fetch(read.reference_name,read_start,read_end) #tally up", "ref_seq_path = '/fh/fast/ha_g/grp/reference/GRCh38/GRCh38.fa' # chrom_sizes_path = '/fh/fast/ha_g/grp/reference/GRCh38/hg38.standard.chrom.sizes' # out_dir =", "read in fetched: #use both fw (positive template length) and", "of each read length and GC content GC_dict = {}", "else: # print(fragment_seq[-read.reference_length:],read.reference_length,'rv') # print(read.query_sequence,len(read.query_sequence),'rv') # print(len(fragment_seq),read.template_length) # print('\\n') #", "not os.path.exists(out_dir 
+'/'+mapable_name+'/GC_counts/'): os.mkdir(out_dir +'/'+mapable_name+'/GC_counts/') # In[ ]: #import filter", "print('interval',i,':',chrom,start,end,'seconds:',np.round(time.time()-start_time)) sys.stdout.flush() #fetch any read that overlaps the inteterval (don't", "all_GC_df = all_GC_df.sum(axis=1) all_GC_df = pd.DataFrame(all_GC_df).rename(columns = {0:'number_of_fragments'}) all_GC_df =", "# ##logic check#### # ################# # if read.is_reverse==False: # if", "print('\\tsize_range = '+str(size_range)) print('\\tCPU = '+str(CPU)) # In[ ]: mapable_name", "read.reference_start + read.reference_length read_start = read_end + read.template_length fragment_seq =", "sublists, 1) # In[ ]: all_GC_df = pd.DataFrame() for i,GC_dict", "always get an integer) #should be very rare if the", "might also need to be in the loop #import the", "In[ ]: parser = argparse.ArgumentParser() parser.add_argument('--bam_file', help='sample_bam_file', required=True) parser.add_argument('--bam_file_name', help='sample", "#import the ref_seq ref_seq=pysam.FastaFile(ref_seq_path) for i in range(len(sublist)): chrom =", "]: print('arguments provided:') print('\\tbam_file_path = \"'+bam_file_path+'\"') print('\\tbam_file_name = \"'+bam_file_name+'\"') print('\\tmapable_regions", "parser.add_argument('--bam_file', help='sample_bam_file', required=True) parser.add_argument('--bam_file_name', help='sample name (does not need to", "need to extend the interval because the fetch function does", "fetched = bam_file.fetch(chrom,start,end) for read in fetched: #use both fw", "##logic check#### # ################# # if read.is_reverse==False: # if fragment_seq[0:read.reference_length]==read.query_sequence", "filter is done right fragment_seq = fragment_seq.astype(int) num_GC = int(fragment_seq.sum())", "import numpy as np import time import argparse import sys", "args.size_range CPU = args.CPU # In[ ]: print('arguments provided:') print('\\tbam_file_path", "ref_seq=pysam.FastaFile(ref_seq_path) for 
i in range(len(sublist)): chrom = sublist.iloc[i][0] start =", "In[ ]: import pysam import os import pandas as pd", "help='sample name (does not need to match actual file name)',", "truncated file warning bam_file = pysam.AlignmentFile(bam_file_path, \"rb\") print('sublist intervals:',len(sublist)) #this", "type=int, required=True) args = parser.parse_args() bam_file_path = args.bam_file bam_file_name =", "#fetch any read that overlaps the inteterval (don't need to", "GC_df['number_of_fragments'] del(GC_df,GC_dict) all_GC_df = all_GC_df.sum(axis=1) all_GC_df = pd.DataFrame(all_GC_df).rename(columns = {0:'number_of_fragments'})", "= p.map(collect_reads, sublists, 1) # In[ ]: all_GC_df = pd.DataFrame()", "read.reference_start+read.template_length elif read.is_reverse==True: read_end = read.reference_start + read.reference_length read_start =", "included',nargs=2, type=int, required=True) parser.add_argument('--CPU',help='number of CPU for parallelizing', type=int, required=True)", "\"'+chrom_sizes_path+'\"') print('\\tout_dir = \"'+out_dir+'\"') print('\\tmap_q = '+str(map_q)) print('\\tsize_range = '+str(size_range))", "GC data if not os.path.exists(out_dir +'/'+mapable_name): os.mkdir(out_dir +'/'+mapable_name) if not", "# In[ ]: #import filter mapable_intervals = pd.read_csv(mapable_path, sep='\\t', header=None)", "#import the bam file #this needs to be done within", "would like to keep these if read.is_paired==True and read.mapping_quality>=map_q and", "parser.add_argument('--out_dir',help='folder for GC bias results',required=True) parser.add_argument('--map_q',help='minimum mapping quality for reads", "#replace with values fragment_seq[(fragment_seq=='G') | (fragment_seq=='C')]=1 fragment_seq[(fragment_seq=='A') | (fragment_seq=='T')]=0 fragment_seq[(fragment_seq=='N')]=np.random.randint(2)", "args.bam_file_name mapable_path=args.mapable_regions ref_seq_path = args.ref_seq chrom_sizes_path = args.chrom_sizes out_dir =", "out_dir = args.out_dir map_q = 
args.map_q size_range = args.size_range CPU", "4 # In[ ]: parser = argparse.ArgumentParser() parser.add_argument('--bam_file', help='sample_bam_file', required=True)", "fragment_seq = np.array(list(fragment_seq)) #replace with values fragment_seq[(fragment_seq=='G') | (fragment_seq=='C')]=1 fragment_seq[(fragment_seq=='A')", "match',read.reference_length) # else: # print(fragment_seq[0:read.reference_length],read.reference_length,'fw') # print(read.query_sequence,len(read.query_sequence),'fw') # print(len(fragment_seq),read.template_length) #", "= current.rename(columns={'index':'num_GC',0:'number_of_fragments'}) current['length']=length current = current[['length','num_GC','number_of_fragments']] GC_df = GC_df.append(current, ignore_index=True)", "args.ref_seq chrom_sizes_path = args.chrom_sizes out_dir = args.out_dir map_q = args.map_q", "directory for the GC data if not os.path.exists(out_dir +'/'+mapable_name): os.mkdir(out_dir", "loop otherwise it gives a truncated file warning bam_file =", "#split and convert to numpy array fragment_seq = np.array(list(fragment_seq)) #replace", "= all_GC_df.reset_index() all_GC_df.to_csv(out_file,sep='\\t',index=False) # In[ ]: print('done') # In[ ]:", "for i in range(len(sublist)): chrom = sublist.iloc[i][0] start = sublist.iloc[i][1]", "GC_dict[length][num_GC]=0 #import the bam file #this needs to be done", "In[ ]: # ##arguments for testing # bam_file_path = '/fh/scratch/delete90/ha_g/realigned_bams/cfDNA_MBC_ULP_hg38/realign_bam_paired_snakemake-master/results/MBC_1041_1_ULP/MBC_1041_1_ULP_recalibrated.bam'", "one per CPU GC_dict_list = p.map(collect_reads, sublists, 1) # In[", "GC_df = GC_df.append(current, ignore_index=True) GC_df = GC_df.set_index(['length','num_GC']) all_GC_df[i] = GC_df['number_of_fragments']", "#this needs to be done within the loop otherwise it", "import sys from multiprocessing import Pool # In[ ]: #", "mapable_intervals = mapable_intervals[mapable_intervals[0].isin(chroms)] print('chroms:', 
chroms) print('number_of_intervals:',len(mapable_intervals)) sys.stdout.flush() # In[ ]:", "]: #create a directory for the GC data if not", "required=True) parser.add_argument('--bam_file_name', help='sample name (does not need to match actual", "to numpy array fragment_seq = np.array(list(fragment_seq)) #replace with values fragment_seq[(fragment_seq=='G')", "import pysam import os import pandas as pd import numpy", "# out_dir = './tmp/' # map_q = 20 # size_range", "sublist.iloc[i][0] start = sublist.iloc[i][1] end = sublist.iloc[i][2] if i%5000==0: print('interval',i,':',chrom,start,end,'seconds:',np.round(time.time()-start_time))", "import pandas as pd import numpy as np import time", "for parallelizing', type=int, required=True) args = parser.parse_args() bam_file_path = args.bam_file", "loop #import the ref_seq ref_seq=pysam.FastaFile(ref_seq_path) for i in range(len(sublist)): chrom", "(fasta format)',required=True) parser.add_argument('--chrom_sizes',help='path to chromosome sizes for the reference seq',required=True)", "the loop #import the ref_seq ref_seq=pysam.FastaFile(ref_seq_path) for i in range(len(sublist)):", "print('\\tout_dir = \"'+out_dir+'\"') print('\\tmap_q = '+str(map_q)) print('\\tsize_range = '+str(size_range)) print('\\tCPU", "= \"'+out_dir+'\"') print('\\tmap_q = '+str(map_q)) print('\\tsize_range = '+str(size_range)) print('\\tCPU =", "otherwise it gives a truncated file warning bam_file = pysam.AlignmentFile(bam_file_path,", "# In[ ]: #create a directory for the GC data", "rare if the filter is done right fragment_seq = fragment_seq.astype(int)", "out_dir +'/'+mapable_name+'/GC_counts/'+ bam_file_name+'.GC_counts.txt' print('out_file',out_file) # In[ ]: #create a directory", "= read.reference_start+read.template_length elif read.is_reverse==True: read_end = read.reference_start + read.reference_length read_start", "argparse.ArgumentParser() parser.add_argument('--bam_file', help='sample_bam_file', required=True) 
parser.add_argument('--bam_file_name', help='sample name (does not need", "for length in range(size_range[0],size_range[1]+1): GC_dict[length]={} for num_GC in range(0,length+1): GC_dict[length][num_GC]=0", "length) and rv (negative template length) reads if (read.is_reverse==False and", "intervals:',len(sublist)) #this might also need to be in the loop", "file #this needs to be done within the loop otherwise", "template length) and rv (negative template length) reads if (read.is_reverse==False", "right fragment_seq = fragment_seq.astype(int) num_GC = int(fragment_seq.sum()) GC_dict[abs(read.template_length)][num_GC]+=1 print('done') return(GC_dict)", "= '/fh/fast/ha_g/grp/reference/GRCh38/hg38.standard.chrom.sizes' # out_dir = './tmp/' # map_q = 20", "# else: # print(fragment_seq[0:read.reference_length],read.reference_length,'fw') # print(read.query_sequence,len(read.query_sequence),'fw') # print(len(fragment_seq),read.template_length) # print('\\n')", "or bed foramt', required=True) parser.add_argument('--ref_seq',help='reference sequence (fasta format)',required=True) parser.add_argument('--chrom_sizes',help='path to", "parser.add_argument('--size_range',help='range of read sizes to be included',nargs=2, type=int, required=True) parser.add_argument('--CPU',help='number", "also need to be in the loop #import the ref_seq", "parallelizing', type=int, required=True) args = parser.parse_args() bam_file_path = args.bam_file bam_file_name", "read_end = read.reference_start+read.template_length elif read.is_reverse==True: read_end = read.reference_start + read.reference_length", "# In[ ]: import pysam import os import pandas as", "args.CPU # In[ ]: print('arguments provided:') print('\\tbam_file_path = \"'+bam_file_path+'\"') print('\\tbam_file_name", "be done within the loop otherwise it gives a truncated", "chroms = ['chr'+str(m) for m in range(1,23)] mapable_intervals = mapable_intervals[mapable_intervals[0].isin(chroms)]", "to be included',nargs=2, type=int, 
required=True) parser.add_argument('--CPU',help='number of CPU for parallelizing',", "chromosome sizes for the reference seq',required=True) parser.add_argument('--out_dir',help='folder for GC bias", "0 or 1 for N (so that you always get", "print('\\tbam_file_path = \"'+bam_file_path+'\"') print('\\tbam_file_name = \"'+bam_file_name+'\"') print('\\tmapable_regions = \"'+mapable_path+'\"') print('\\tref_seq_path", "range(size_range[0],size_range[1]+1): GC_dict[length]={} for num_GC in range(0,length+1): GC_dict[length][num_GC]=0 #import the bam", "X and Y chroms = ['chr'+str(m) for m in range(1,23)]", "fetched: #use both fw (positive template length) and rv (negative", "= '+str(CPU)) # In[ ]: mapable_name = mapable_path.rsplit('/',1)[1].rsplit('.',1)[0] out_file =", "gives a truncated file warning bam_file = pysam.AlignmentFile(bam_file_path, \"rb\") print('sublist", "GC_dict = {} for length in range(size_range[0],size_range[1]+1): GC_dict[length]={} for num_GC", "# mapable_path = '../../downloads/genome/repeat_masker.mapable.k50.Umap.hg38.bedGraph' # ref_seq_path = '/fh/fast/ha_g/grp/reference/GRCh38/GRCh38.fa' # chrom_sizes_path", "else: # print(fragment_seq[0:read.reference_length],read.reference_length,'fw') # print(read.query_sequence,len(read.query_sequence),'fw') # print(len(fragment_seq),read.template_length) # print('\\n') #", "the GC data if not os.path.exists(out_dir +'/'+mapable_name): os.mkdir(out_dir +'/'+mapable_name) if", "the inteterval (don't need to extend the interval because the", "args.bam_file bam_file_name = args.bam_file_name mapable_path=args.mapable_regions ref_seq_path = args.ref_seq chrom_sizes_path =", "| (fragment_seq=='T')]=0 fragment_seq[(fragment_seq=='N')]=np.random.randint(2) #choose a random 0 or 1 for", "################# #split and convert to numpy array fragment_seq = np.array(list(fragment_seq))", "bed foramt', required=True) parser.add_argument('--ref_seq',help='reference sequence (fasta format)',required=True) 
parser.add_argument('--chrom_sizes',help='path to chromosome", "['chr'+str(m) for m in range(1,23)] mapable_intervals = mapable_intervals[mapable_intervals[0].isin(chroms)] print('chroms:', chroms)", "header=None) #remove non standard chromosomes and X and Y chroms", "= read.reference_start read_end = read.reference_start+read.template_length elif read.is_reverse==True: read_end = read.reference_start", "#create a directory for the GC data if not os.path.exists(out_dir", "read_start = read.reference_start read_end = read.reference_start+read.template_length elif read.is_reverse==True: read_end =", "20 # size_range = [15,500] # CPU = 4 #", "print('rv match',read.reference_length) # else: # print(fragment_seq[-read.reference_length:],read.reference_length,'rv') # print(read.query_sequence,len(read.query_sequence),'rv') # print(len(fragment_seq),read.template_length)", "]: mapable_name = mapable_path.rsplit('/',1)[1].rsplit('.',1)[0] out_file = out_dir +'/'+mapable_name+'/GC_counts/'+ bam_file_name+'.GC_counts.txt' print('out_file',out_file)", "os.mkdir(out_dir +'/'+mapable_name+'/GC_counts/') # In[ ]: #import filter mapable_intervals = pd.read_csv(mapable_path,", "utf-8 # In[ ]: import pysam import os import pandas", "'MBC_1041_1_ULP' # mapable_path = '../../downloads/genome/repeat_masker.mapable.k50.Umap.hg38.bedGraph' # ref_seq_path = '/fh/fast/ha_g/grp/reference/GRCh38/GRCh38.fa' #", "be included',nargs=2, type=int, required=True) parser.add_argument('--CPU',help='number of CPU for parallelizing', type=int,", "= pd.DataFrame() for length in GC_dict.keys(): current = pd.Series(GC_dict[length]).reset_index() current", "= GC_df.set_index(['length','num_GC']) all_GC_df[i] = GC_df['number_of_fragments'] del(GC_df,GC_dict) all_GC_df = all_GC_df.sum(axis=1) all_GC_df", "= parser.parse_args() bam_file_path = args.bam_file bam_file_name = args.bam_file_name mapable_path=args.mapable_regions ref_seq_path", "In[ ]: mapable_name = 
mapable_path.rsplit('/',1)[1].rsplit('.',1)[0] out_file = out_dir +'/'+mapable_name+'/GC_counts/'+ bam_file_name+'.GC_counts.txt'", "for length in GC_dict.keys(): current = pd.Series(GC_dict[length]).reset_index() current = current.rename(columns={'index':'num_GC',0:'number_of_fragments'})", "pd.DataFrame(all_GC_df).rename(columns = {0:'number_of_fragments'}) all_GC_df = all_GC_df.reset_index() all_GC_df.to_csv(out_file,sep='\\t',index=False) # In[ ]:", "GC content fragment_seq=fragment_seq.replace('g','G').replace('c','C').replace('a','A').replace('t','T').replace('n','N') # ################# # ##logic check#### # #################", "for the reference seq',required=True) parser.add_argument('--out_dir',help='folder for GC bias results',required=True) parser.add_argument('--map_q',help='minimum", "match actual file name)', required=True) parser.add_argument('--mapable_regions', help='highly mapable regions to", "collect_reads(sublist): #create a dict for holding the frequency of each", "mapable_path.rsplit('/',1)[1].rsplit('.',1)[0] out_file = out_dir +'/'+mapable_name+'/GC_counts/'+ bam_file_name+'.GC_counts.txt' print('out_file',out_file) # In[ ]:", "actual file name)', required=True) parser.add_argument('--mapable_regions', help='highly mapable regions to be", "the list into sublists, one per CPU GC_dict_list = p.map(collect_reads,", "read.is_reverse==False: # if fragment_seq[0:read.reference_length]==read.query_sequence and len(fragment_seq)==read.template_length: # print('fw match',read.reference_length) #", "\"rb\") print('sublist intervals:',len(sublist)) #this might also need to be in", "]: # ##arguments for testing # bam_file_path = '/fh/scratch/delete90/ha_g/realigned_bams/cfDNA_MBC_ULP_hg38/realign_bam_paired_snakemake-master/results/MBC_1041_1_ULP/MBC_1041_1_ULP_recalibrated.bam' #", "read.reference_length read_start = read_end + read.template_length fragment_seq = ref_seq.fetch(read.reference_name,read_start,read_end) #tally", "holding the frequency 
of each read length and GC content", "fragment_seq[(fragment_seq=='A') | (fragment_seq=='T')]=0 fragment_seq[(fragment_seq=='N')]=np.random.randint(2) #choose a random 0 or 1", "chrom_sizes_path = args.chrom_sizes out_dir = args.out_dir map_q = args.map_q size_range", "'./tmp/' # map_q = 20 # size_range = [15,500] #", "this automatically) fetched = bam_file.fetch(chrom,start,end) for read in fetched: #use", "# In[ ]: print('done') # In[ ]: # In[ ]:", "# elif read.is_reverse==True: # if fragment_seq[-read.reference_length:]==read.query_sequence and len(fragment_seq)==-read.template_length: # print('rv", "considered',type=int,required=True) parser.add_argument('--size_range',help='range of read sizes to be included',nargs=2, type=int, required=True)", "= time.time() p = Pool(processes=CPU) #use the available CPU sublists", "= pd.Series(GC_dict[length]).reset_index() current = current.rename(columns={'index':'num_GC',0:'number_of_fragments'}) current['length']=length current = current[['length','num_GC','number_of_fragments']] GC_df", "parser.add_argument('--bam_file_name', help='sample name (does not need to match actual file", "-read.template_length<=size_range[1]): #qc filters, some longer fragments are considered 'improper pairs'", "file warning bam_file = pysam.AlignmentFile(bam_file_path, \"rb\") print('sublist intervals:',len(sublist)) #this might", "GC_dict_list = p.map(collect_reads, sublists, 1) # In[ ]: all_GC_df =", "read.is_qcfail==False: if read.is_reverse==False: read_start = read.reference_start read_end = read.reference_start+read.template_length elif", "= ['chr'+str(m) for m in range(1,23)] mapable_intervals = mapable_intervals[mapable_intervals[0].isin(chroms)] print('chroms:',", "for GC bias results',required=True) parser.add_argument('--map_q',help='minimum mapping quality for reads to", "multiprocessing import Pool # In[ ]: # ##arguments for testing", "print(read.query_sequence,len(read.query_sequence),'fw') # 
print(len(fragment_seq),read.template_length) # print('\\n') # elif read.is_reverse==True: # if", "(positive template length) and rv (negative template length) reads if", "print('sublist intervals:',len(sublist)) #this might also need to be in the", "for read in fetched: #use both fw (positive template length)", "np import time import argparse import sys from multiprocessing import", "print(fragment_seq[0:read.reference_length],read.reference_length,'fw') # print(read.query_sequence,len(read.query_sequence),'fw') # print(len(fragment_seq),read.template_length) # print('\\n') # elif read.is_reverse==True:", "in range(0,length+1): GC_dict[length][num_GC]=0 #import the bam file #this needs to", "to be used in GC correction, bedGraph or bed foramt',", "the reference seq',required=True) parser.add_argument('--out_dir',help='folder for GC bias results',required=True) parser.add_argument('--map_q',help='minimum mapping", "sys.stdout.flush() #fetch any read that overlaps the inteterval (don't need", "rv (negative template length) reads if (read.is_reverse==False and read.template_length>=size_range[0] and", "# In[ ]: all_GC_df = pd.DataFrame() for i,GC_dict in enumerate(GC_dict_list):", "bam_file.fetch(chrom,start,end) for read in fetched: #use both fw (positive template", "frequency of each read length and GC content GC_dict =", "sublists = np.array_split(mapable_intervals,CPU) #split the list into sublists, one per", "the ref_seq ref_seq=pysam.FastaFile(ref_seq_path) for i in range(len(sublist)): chrom = sublist.iloc[i][0]", "per CPU GC_dict_list = p.map(collect_reads, sublists, 1) # In[ ]:", "In[ ]: print('done') # In[ ]: # In[ ]: #", "provided:') print('\\tbam_file_path = \"'+bam_file_path+'\"') print('\\tbam_file_name = \"'+bam_file_name+'\"') print('\\tmapable_regions = \"'+mapable_path+'\"')", "of read sizes to be included',nargs=2, type=int, required=True) parser.add_argument('--CPU',help='number of", "+'/'+mapable_name): os.mkdir(out_dir +'/'+mapable_name) if not 
os.path.exists(out_dir +'/'+mapable_name+'/GC_counts/'): os.mkdir(out_dir +'/'+mapable_name+'/GC_counts/') #", "p.map(collect_reads, sublists, 1) # In[ ]: all_GC_df = pd.DataFrame() for", "to keep these if read.is_paired==True and read.mapping_quality>=map_q and read.is_duplicate==False and", "time.time() p = Pool(processes=CPU) #use the available CPU sublists =", "In[ ]: def collect_reads(sublist): #create a dict for holding the", "is done right fragment_seq = fragment_seq.astype(int) num_GC = int(fragment_seq.sum()) GC_dict[abs(read.template_length)][num_GC]+=1", "read.is_paired==True and read.mapping_quality>=map_q and read.is_duplicate==False and read.is_qcfail==False: if read.is_reverse==False: read_start", "an integer) #should be very rare if the filter is", "results',required=True) parser.add_argument('--map_q',help='minimum mapping quality for reads to be considered',type=int,required=True) parser.add_argument('--size_range',help='range", "in GC correction, bedGraph or bed foramt', required=True) parser.add_argument('--ref_seq',help='reference sequence", "\"'+ref_seq_path+'\"') print('\\tchrom_sizes_path = \"'+chrom_sizes_path+'\"') print('\\tout_dir = \"'+out_dir+'\"') print('\\tmap_q = '+str(map_q))", "need to be in the loop #import the ref_seq ref_seq=pysam.FastaFile(ref_seq_path)", "and convert to numpy array fragment_seq = np.array(list(fragment_seq)) #replace with", "num_GC = int(fragment_seq.sum()) GC_dict[abs(read.template_length)][num_GC]+=1 print('done') return(GC_dict) # In[ ]: start_time", "#!/usr/bin/env python # coding: utf-8 # In[ ]: import pysam", "range(len(sublist)): chrom = sublist.iloc[i][0] start = sublist.iloc[i][1] end = sublist.iloc[i][2]", "parser.add_argument('--ref_seq',help='reference sequence (fasta format)',required=True) parser.add_argument('--chrom_sizes',help='path to chromosome sizes for the", "parser = argparse.ArgumentParser() parser.add_argument('--bam_file', help='sample_bam_file', required=True) 
parser.add_argument('--bam_file_name', help='sample name (does", "In[ ]: all_GC_df = pd.DataFrame() for i,GC_dict in enumerate(GC_dict_list): GC_df", "not os.path.exists(out_dir +'/'+mapable_name): os.mkdir(out_dir +'/'+mapable_name) if not os.path.exists(out_dir +'/'+mapable_name+'/GC_counts/'): os.mkdir(out_dir", "= mapable_path.rsplit('/',1)[1].rsplit('.',1)[0] out_file = out_dir +'/'+mapable_name+'/GC_counts/'+ bam_file_name+'.GC_counts.txt' print('out_file',out_file) # In[", "I would like to keep these if read.is_paired==True and read.mapping_quality>=map_q", "reference seq',required=True) parser.add_argument('--out_dir',help='folder for GC bias results',required=True) parser.add_argument('--map_q',help='minimum mapping quality", "In[ ]: print('arguments provided:') print('\\tbam_file_path = \"'+bam_file_path+'\"') print('\\tbam_file_name = \"'+bam_file_name+'\"')", "current[['length','num_GC','number_of_fragments']] GC_df = GC_df.append(current, ignore_index=True) GC_df = GC_df.set_index(['length','num_GC']) all_GC_df[i] =", "bam_file = pysam.AlignmentFile(bam_file_path, \"rb\") print('sublist intervals:',len(sublist)) #this might also need", "= args.chrom_sizes out_dir = args.out_dir map_q = args.map_q size_range =", "= {0:'number_of_fragments'}) all_GC_df = all_GC_df.reset_index() all_GC_df.to_csv(out_file,sep='\\t',index=False) # In[ ]: print('done')", "return(GC_dict) # In[ ]: start_time = time.time() p = Pool(processes=CPU)", "import time import argparse import sys from multiprocessing import Pool", "bam_file_path = args.bam_file bam_file_name = args.bam_file_name mapable_path=args.mapable_regions ref_seq_path = args.ref_seq", "(does not need to match actual file name)', required=True) parser.add_argument('--mapable_regions',", "pd.read_csv(mapable_path, sep='\\t', header=None) #remove non standard chromosomes and X and", "length) reads if (read.is_reverse==False and read.template_length>=size_range[0] and read.template_length<=size_range[1]) or 
(read.is_reverse==True", "inteterval (don't need to extend the interval because the fetch", "def collect_reads(sublist): #create a dict for holding the frequency of", "(negative template length) reads if (read.is_reverse==False and read.template_length>=size_range[0] and read.template_length<=size_range[1])", "fragment_seq=fragment_seq.replace('g','G').replace('c','C').replace('a','A').replace('t','T').replace('n','N') # ################# # ##logic check#### # ################# # if", "# In[ ]: parser = argparse.ArgumentParser() parser.add_argument('--bam_file', help='sample_bam_file', required=True) parser.add_argument('--bam_file_name',", "read_end = read.reference_start + read.reference_length read_start = read_end + read.template_length", "current.rename(columns={'index':'num_GC',0:'number_of_fragments'}) current['length']=length current = current[['length','num_GC','number_of_fragments']] GC_df = GC_df.append(current, ignore_index=True) GC_df", "to chromosome sizes for the reference seq',required=True) parser.add_argument('--out_dir',help='folder for GC", "m in range(1,23)] mapable_intervals = mapable_intervals[mapable_intervals[0].isin(chroms)] print('chroms:', chroms) print('number_of_intervals:',len(mapable_intervals)) sys.stdout.flush()", "print('\\n') # ################# #split and convert to numpy array fragment_seq", "'improper pairs' but I would like to keep these if", "# print('\\n') # elif read.is_reverse==True: # if fragment_seq[-read.reference_length:]==read.query_sequence and len(fragment_seq)==-read.template_length:", "bam file #this needs to be done within the loop", "fw (positive template length) and rv (negative template length) reads", "if fragment_seq[-read.reference_length:]==read.query_sequence and len(fragment_seq)==-read.template_length: # print('rv match',read.reference_length) # else: #", "bam_file_name = args.bam_file_name mapable_path=args.mapable_regions ref_seq_path = args.ref_seq chrom_sizes_path = args.chrom_sizes", "start = 
sublist.iloc[i][1] end = sublist.iloc[i][2] if i%5000==0: print('interval',i,':',chrom,start,end,'seconds:',np.round(time.time()-start_time)) sys.stdout.flush()", "ref_seq.fetch(read.reference_name,read_start,read_end) #tally up the GC content fragment_seq=fragment_seq.replace('g','G').replace('c','C').replace('a','A').replace('t','T').replace('n','N') # ################# #", "# In[ ]: mapable_name = mapable_path.rsplit('/',1)[1].rsplit('.',1)[0] out_file = out_dir +'/'+mapable_name+'/GC_counts/'+", "# map_q = 20 # size_range = [15,500] # CPU", "fragments are considered 'improper pairs' but I would like to", "are considered 'improper pairs' but I would like to keep", "fragment_seq = ref_seq.fetch(read.reference_name,read_start,read_end) #tally up the GC content fragment_seq=fragment_seq.replace('g','G').replace('c','C').replace('a','A').replace('t','T').replace('n','N') #", "]: def collect_reads(sublist): #create a dict for holding the frequency", "some longer fragments are considered 'improper pairs' but I would", "length and GC content GC_dict = {} for length in", "needs to be done within the loop otherwise it gives", "# ################# # ##logic check#### # ################# # if read.is_reverse==False:", "values fragment_seq[(fragment_seq=='G') | (fragment_seq=='C')]=1 fragment_seq[(fragment_seq=='A') | (fragment_seq=='T')]=0 fragment_seq[(fragment_seq=='N')]=np.random.randint(2) #choose a", "= np.array(list(fragment_seq)) #replace with values fragment_seq[(fragment_seq=='G') | (fragment_seq=='C')]=1 fragment_seq[(fragment_seq=='A') |", "\"'+out_dir+'\"') print('\\tmap_q = '+str(map_q)) print('\\tsize_range = '+str(size_range)) print('\\tCPU = '+str(CPU))", "##arguments for testing # bam_file_path = '/fh/scratch/delete90/ha_g/realigned_bams/cfDNA_MBC_ULP_hg38/realign_bam_paired_snakemake-master/results/MBC_1041_1_ULP/MBC_1041_1_ULP_recalibrated.bam' # bam_file_name =", "the frequency of each read length and GC content GC_dict", "\"'+bam_file_path+'\"') 
print('\\tbam_file_name = \"'+bam_file_name+'\"') print('\\tmapable_regions = \"'+mapable_path+'\"') print('\\tref_seq_path = \"'+ref_seq_path+'\"')", "if the filter is done right fragment_seq = fragment_seq.astype(int) num_GC", "to extend the interval because the fetch function does this", "#use the available CPU sublists = np.array_split(mapable_intervals,CPU) #split the list", "convert to numpy array fragment_seq = np.array(list(fragment_seq)) #replace with values", "the bam file #this needs to be done within the", "+ read.template_length fragment_seq = ref_seq.fetch(read.reference_name,read_start,read_end) #tally up the GC content", "]: all_GC_df = pd.DataFrame() for i,GC_dict in enumerate(GC_dict_list): GC_df =", "]: #import filter mapable_intervals = pd.read_csv(mapable_path, sep='\\t', header=None) #remove non", "\"'+mapable_path+'\"') print('\\tref_seq_path = \"'+ref_seq_path+'\"') print('\\tchrom_sizes_path = \"'+chrom_sizes_path+'\"') print('\\tout_dir = \"'+out_dir+'\"')", "current = current.rename(columns={'index':'num_GC',0:'number_of_fragments'}) current['length']=length current = current[['length','num_GC','number_of_fragments']] GC_df = GC_df.append(current,", "out_dir = './tmp/' # map_q = 20 # size_range =", "# bam_file_path = '/fh/scratch/delete90/ha_g/realigned_bams/cfDNA_MBC_ULP_hg38/realign_bam_paired_snakemake-master/results/MBC_1041_1_ULP/MBC_1041_1_ULP_recalibrated.bam' # bam_file_name = 'MBC_1041_1_ULP' # mapable_path", "= args.size_range CPU = args.CPU # In[ ]: print('arguments provided:')", "to be in the loop #import the ref_seq ref_seq=pysam.FastaFile(ref_seq_path) for", "= np.array_split(mapable_intervals,CPU) #split the list into sublists, one per CPU", "os import pandas as pd import numpy as np import", "fragment_seq[-read.reference_length:]==read.query_sequence and len(fragment_seq)==-read.template_length: # print('rv match',read.reference_length) # else: # print(fragment_seq[-read.reference_length:],read.reference_length,'rv')", 
"bam_file_name+'.GC_counts.txt' print('out_file',out_file) # In[ ]: #create a directory for the", "array fragment_seq = np.array(list(fragment_seq)) #replace with values fragment_seq[(fragment_seq=='G') | (fragment_seq=='C')]=1", "required=True) parser.add_argument('--mapable_regions', help='highly mapable regions to be used in GC", "for reads to be considered',type=int,required=True) parser.add_argument('--size_range',help='range of read sizes to", "print('done') # In[ ]: # In[ ]: # In[ ]:", "= './tmp/' # map_q = 20 # size_range = [15,500]", "# In[ ]: start_time = time.time() p = Pool(processes=CPU) #use", "]: import pysam import os import pandas as pd import", "read that overlaps the inteterval (don't need to extend the", "automatically) fetched = bam_file.fetch(chrom,start,end) for read in fetched: #use both", "mapable_intervals = pd.read_csv(mapable_path, sep='\\t', header=None) #remove non standard chromosomes and", "= sublist.iloc[i][2] if i%5000==0: print('interval',i,':',chrom,start,end,'seconds:',np.round(time.time()-start_time)) sys.stdout.flush() #fetch any read that", "################# # ##logic check#### # ################# # if read.is_reverse==False: #", "fragment_seq[(fragment_seq=='N')]=np.random.randint(2) #choose a random 0 or 1 for N (so", "to be done within the loop otherwise it gives a", "= int(fragment_seq.sum()) GC_dict[abs(read.template_length)][num_GC]+=1 print('done') return(GC_dict) # In[ ]: start_time =", "= args.out_dir map_q = args.map_q size_range = args.size_range CPU =", "considered 'improper pairs' but I would like to keep these", "coding: utf-8 # In[ ]: import pysam import os import", "argparse import sys from multiprocessing import Pool # In[ ]:", "= '/fh/scratch/delete90/ha_g/realigned_bams/cfDNA_MBC_ULP_hg38/realign_bam_paired_snakemake-master/results/MBC_1041_1_ULP/MBC_1041_1_ULP_recalibrated.bam' # bam_file_name = 'MBC_1041_1_ULP' # mapable_path = '../../downloads/genome/repeat_masker.mapable.k50.Umap.hg38.bedGraph'", "Y 
chroms = ['chr'+str(m) for m in range(1,23)] mapable_intervals =", "all_GC_df = all_GC_df.reset_index() all_GC_df.to_csv(out_file,sep='\\t',index=False) # In[ ]: print('done') # In[", "chroms) print('number_of_intervals:',len(mapable_intervals)) sys.stdout.flush() # In[ ]: def collect_reads(sublist): #create a", "GC content GC_dict = {} for length in range(size_range[0],size_range[1]+1): GC_dict[length]={}", "read sizes to be included',nargs=2, type=int, required=True) parser.add_argument('--CPU',help='number of CPU", "GC_df = GC_df.set_index(['length','num_GC']) all_GC_df[i] = GC_df['number_of_fragments'] del(GC_df,GC_dict) all_GC_df = all_GC_df.sum(axis=1)", "(fragment_seq=='T')]=0 fragment_seq[(fragment_seq=='N')]=np.random.randint(2) #choose a random 0 or 1 for N", "#use both fw (positive template length) and rv (negative template", "reads to be considered',type=int,required=True) parser.add_argument('--size_range',help='range of read sizes to be", "GC_df.append(current, ignore_index=True) GC_df = GC_df.set_index(['length','num_GC']) all_GC_df[i] = GC_df['number_of_fragments'] del(GC_df,GC_dict) all_GC_df", "+'/'+mapable_name+'/GC_counts/') # In[ ]: #import filter mapable_intervals = pd.read_csv(mapable_path, sep='\\t',", "import os import pandas as pd import numpy as np", "# ################# #split and convert to numpy array fragment_seq =", "print('\\tchrom_sizes_path = \"'+chrom_sizes_path+'\"') print('\\tout_dir = \"'+out_dir+'\"') print('\\tmap_q = '+str(map_q)) print('\\tsize_range", "mapping quality for reads to be considered',type=int,required=True) parser.add_argument('--size_range',help='range of read", "read_end + read.template_length fragment_seq = ref_seq.fetch(read.reference_name,read_start,read_end) #tally up the GC", "all_GC_df.to_csv(out_file,sep='\\t',index=False) # In[ ]: print('done') # In[ ]: # In[", "file name)', required=True) parser.add_argument('--mapable_regions', help='highly mapable regions to be used", "fragment_seq.astype(int) 
def collect_reads(sublist):
    """Tally fragment-length / GC-content frequencies for one interval sublist.

    Runs inside a multiprocessing worker. Opens its own bam and fasta
    handles (sharing a handle across processes triggers a truncated-file
    warning from pysam), fetches reads overlapping each interval, and counts
    how many fragments of each length carry each possible number of G/C bases.

    Parameters
    ----------
    sublist : pandas.DataFrame
        Intervals to scan; columns 0, 1, 2 are chrom, start, end.

    Returns
    -------
    dict
        GC_dict[fragment_length][num_GC] -> number of fragments observed.

    Notes
    -----
    Reads module-level globals set by the surrounding script: bam_file_path,
    ref_seq_path, size_range, map_q, and start_time (for progress logging).
    """
    # Pre-build the nested dict so every (length, num_GC) combination in
    # range exists with a zero count, even if never observed.
    GC_dict = {}
    for length in range(size_range[0],size_range[1]+1):
        GC_dict[length]={}
        for num_GC in range(0,length+1):
            GC_dict[length][num_GC]=0

    # Open the bam inside the worker; this needs to be done within the
    # worker otherwise it gives a truncated file warning.
    bam_file = pysam.AlignmentFile(bam_file_path, "rb")
    print('sublist intervals:',len(sublist))

    # The reference fasta is opened per worker for the same reason.
    ref_seq=pysam.FastaFile(ref_seq_path)

    for i in range(len(sublist)):
        chrom = sublist.iloc[i][0]
        start = sublist.iloc[i][1]
        end = sublist.iloc[i][2]
        if i%5000==0:
            print('interval',i,':',chrom,start,end,'seconds:',np.round(time.time()-start_time))
            sys.stdout.flush()
        # Fetch any read that overlaps the interval (no need to extend the
        # interval because fetch() already returns overlapping reads).
        fetched = bam_file.fetch(chrom,start,end)
        for read in fetched:
            # Use both fw (positive template length) and rv (negative
            # template length) reads whose fragment size is within range.
            if (read.is_reverse==False and read.template_length>=size_range[0] and read.template_length<=size_range[1]) or (read.is_reverse==True and -read.template_length>=size_range[0] and -read.template_length<=size_range[1]):
                # QC filters; some longer fragments are flagged as
                # 'improper pairs' but are deliberately kept here.
                if read.is_paired==True and read.mapping_quality>=map_q and read.is_duplicate==False and read.is_qcfail==False:
                    if read.is_reverse==False:
                        read_start = read.reference_start
                        read_end = read.reference_start+read.template_length
                    elif read.is_reverse==True:
                        read_end = read.reference_start + read.reference_length
                        # template_length is negative for reverse reads, so
                        # adding it walks back to the fragment start.
                        read_start = read_end + read.template_length

                    fragment_seq = ref_seq.fetch(read.reference_name,read_start,read_end)

                    # Normalize case so the tally below only matches
                    # upper-case bases.
                    fragment_seq=fragment_seq.replace('g','G').replace('c','C').replace('a','A').replace('t','T').replace('n','N')

                    # Split into a per-base array and map G/C -> 1, A/T -> 0.
                    fragment_seq = np.array(list(fragment_seq))
                    fragment_seq[(fragment_seq=='G') | (fragment_seq=='C')]=1
                    fragment_seq[(fragment_seq=='A') | (fragment_seq=='T')]=0
                    # BUGFIX: draw an independent random 0/1 for EACH 'N'
                    # base. The original np.random.randint(2) drew a single
                    # value and applied it to every N in the fragment,
                    # biasing fragments with multiple Ns toward all-GC or
                    # all-AT. Ns should be very rare if the mapability
                    # filter is done right.
                    n_mask = fragment_seq=='N'
                    fragment_seq[n_mask]=np.random.randint(2, size=int(n_mask.sum()))

                    fragment_seq = fragment_seq.astype(int)
                    num_GC = int(fragment_seq.sum())
                    GC_dict[abs(read.template_length)][num_GC]+=1

    print('done')
    return(GC_dict)
start_time = time.time()

# Fan the interval sublists out across worker processes; each worker returns
# its own GC_dict. The with-block guarantees the pool's worker processes are
# cleaned up even if map() raises.
with Pool(processes=CPU) as p:
    # Split the interval list into one sublist per CPU; chunksize=1 hands
    # each worker exactly one sublist.
    sublists = np.array_split(mapable_intervals,CPU)
    GC_dict_list = p.map(collect_reads, sublists, 1)

# Merge the per-worker dictionaries into a single long-format table with one
# row per (length, num_GC) combination, one column per worker.
all_GC_df = pd.DataFrame()
for i,GC_dict in enumerate(GC_dict_list):
    # Collect the per-length frames and concatenate once: DataFrame.append
    # was deprecated and removed in pandas 2.0, and calling it in a loop
    # copies the frame quadratically.
    pieces = []
    for length in GC_dict.keys():
        current = pd.Series(GC_dict[length]).reset_index()
        current = current.rename(columns={'index':'num_GC',0:'number_of_fragments'})
        current['length']=length
        current = current[['length','num_GC','number_of_fragments']]
        pieces.append(current)
    GC_df = pd.concat(pieces, ignore_index=True)
    GC_df = GC_df.set_index(['length','num_GC'])
    all_GC_df[i] = GC_df['number_of_fragments']
    del(GC_df,GC_dict)

# Sum the counts across workers and write the final tab-separated table.
all_GC_df = all_GC_df.sum(axis=1)
all_GC_df = pd.DataFrame(all_GC_df).rename(columns = {0:'number_of_fragments'})
all_GC_df = all_GC_df.reset_index()
all_GC_df.to_csv(out_file,sep='\t',index=False)

print('done')
sys.stdout.flush() # In[ ]: def collect_reads(sublist): #create a dict", "very rare if the filter is done right fragment_seq =", "quality for reads to be considered',type=int,required=True) parser.add_argument('--size_range',help='range of read sizes", "GC_df = pd.DataFrame() for length in GC_dict.keys(): current = pd.Series(GC_dict[length]).reset_index()", "in enumerate(GC_dict_list): GC_df = pd.DataFrame() for length in GC_dict.keys(): current", "# chrom_sizes_path = '/fh/fast/ha_g/grp/reference/GRCh38/hg38.standard.chrom.sizes' # out_dir = './tmp/' # map_q" ]
[ "in 'hours' are in 'User_ID' in 'employee_roster' # we get", "{'User_ID': 'id_user', 'Email_ID': 'id_email', 'Fullname': 'fullname'}, inplace = True) #", "= pd.DataFrame({'id_currency': (np.arange(len(currencies)) + 1), 'currency': currencies}) # send data", "values genders = sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis = 0).unique()) # create", "7659 'True' values, meaning that NOT all 'UserId' in 'hours'", "users_final.to_sql('DIM_User', con = connection, if_exists = 'append', index = False)", "(np.arange(len(currencies)) + 1), 'currency': currencies}) # send data frame to", "frame to use pd.merge() df_users = pd.DataFrame({'User_ID': users}) # left", "0).unique()) # create a data frame DIM_Gender = pd.DataFrame({'id_gender': (np.arange(len(genders))", "are in 'User_ID' in 'employee_roster' # we get 7659 'True'", "= 0).unique()) # create a data frame to use pd.merge()", "skills['UserId'], hours['UserId']], axis = 0).unique()) # create a data frame", "dimensions created from source employee_roster # %% create DIM_Currency #", "= 0).unique()) # create a data frame DIM_Gender = pd.DataFrame({'id_gender':", "genders}) # send data frame to DB DIM_Gender.to_sql('DIM_Gender', con =", "create DIM_Department # get unique values departments = sorted(pd.concat([employee_roster['Department'], skills['Department']],", "'User_ID', how ='left') # select only columns I need users_final", "= False) # %% dimensions created from source skills #", "# send data frame to DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con = connection,", "create DIM_User # check if 'UserId' values in 'skills' are", "data frame DIM_Currency = pd.DataFrame({'id_currency': (np.arange(len(currencies)) + 1), 'currency': currencies})", "# select only columns I need users_final = users_final[['User_ID', 'Email_ID',", "if_exists = 'append', index = False) # %% create DIM_Department", "if_exists = 'append', index = False) # %% create DIM_AttributeSubGroup", "# %% dimensions 
created from source skills # %% create", "create a data frame DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) + 1),", "'Fullname': 'fullname'}, inplace = True) # send data frame to", "# check if 'UserId' values in 'skills' are in 'User_ID'", "employee_roster # %% create DIM_Currency # get unique values currencies", "'Fullname']] # rename columns users_final.rename(columns = {'User_ID': 'id_user', 'Email_ID': 'id_email',", "np.isin(hours['UserId'], employee_roster['User_ID']).sum() # get unique values users = sorted(pd.concat([employee_roster['User_ID'], skills['UserId'],", "# %% required libraries import numpy as np import pandas", "'User_ID' in 'employee_roster' # we get 7659 'True' values, meaning", "(np.arange(len(att_name)) + 1), 'attribute_name': att_name}) # send data frame to", "pd.DataFrame({'User_ID': users}) # left join 'df_user' with 'employee_roster' on 'UserID'", "employee_roster users_check_1 = np.isin(skills['UserId'], employee_roster['User_ID']).sum() # check if 'UserId' values", "# %% create DIM_Gender # get unique values genders =", "+ 1), 'attribute_group': att_group}) # send data frame to DB", "# send data frame to DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con = connection,", "'attribute_name': att_name}) # send data frame to DB DIM_AttributeName.to_sql('DIM_AttributeName', con", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on", "# rename columns users_final.rename(columns = {'User_ID': 'id_user', 'Email_ID': 'id_email', 'Fullname':", "use pd.merge() df_users = pd.DataFrame({'User_ID': users}) # left join 'df_user'", "employee_roster['User_ID']).sum() # get unique values users = sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']],", "required libraries import numpy as np import pandas as pd", "sheet_name = 'Sheet1') # read skills data skills = pd.read_excel(\"datasources/skills.xlsx\",", "%% read data sets from where I will build the", "pd from 
sqlalchemy import create_engine # %% connect to DB", "skills = pd.read_excel(\"datasources/skills.xlsx\", sheet_name = \"Sheet1\") # read hours data", "in 'User_ID' in 'employee_roster' # we get 20134 'True' values,", "source skills # %% create DIM_AttributeGroup # get unique values", "# get unique values att_sub_group = sorted(skills['Attribute Sub-Group'].unique()) # create", "False) # %% create DIM_AttributeSubGroup # get unique values att_sub_group", "connection using pymssql engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection = engine.connect() #", "frame to DB DIM_Gender.to_sql('DIM_Gender', con = connection, if_exists = 'append',", "frame DIM_Currency = pd.DataFrame({'id_currency': (np.arange(len(currencies)) + 1), 'currency': currencies}) #", "employee_roster = pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name = 'Sheet1') # read skills data", "index = False) # %% dimensions created from source skills", "# %% create DIM_Currency # get unique values currencies =", "where I will build the dimension tables # read employee", "DIM_AttributeName.to_sql('DIM_AttributeName', con = connection, if_exists = 'append', index = False)", "att_name}) # send data frame to DB DIM_AttributeName.to_sql('DIM_AttributeName', con =", "import numpy as np import pandas as pd from sqlalchemy", "# left join 'df_user' with 'employee_roster' on 'UserID' users_final =", "read employee roster data employee_roster = pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name = 'Sheet1')", "# %% create DIM_User # check if 'UserId' values in", "'df_user' with 'employee_roster' on 'UserID' users_final = pd.merge(df_users, employee_roster, on", "to use pd.merge() df_users = pd.DataFrame({'User_ID': users}) # left join", "pd.DataFrame({'id_department': (np.arange(len(departments)) + 1), 'department': departments}) # send data frame", "data frame to DB DIM_Department.to_sql('DIM_Department', con = connection, if_exists =", "+ 1), 
'department': departments}) # send data frame to DB", "read data sets from where I will build the dimension", "create a data frame DIM_Gender = pd.DataFrame({'id_gender': (np.arange(len(genders)) + 1),", "= connection, if_exists = 'append', index = False) # %%", "axis = 0).unique()) # create a data frame DIM_Gender =", "values, meaning that all 'UserId' in 'skills' are already #", "send data frame to DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con = connection, if_exists", "in employee_roster users_check_1 = np.isin(skills['UserId'], employee_roster['User_ID']).sum() # check if 'UserId'", "# in 'User_ID' in employee_roster users_check_2 = np.isin(hours['UserId'], employee_roster['User_ID']).sum() #", "= pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name = 'Sheet1') # read skills data skills", "pd.merge(df_users, employee_roster, on = 'User_ID', how ='left') # select only", "unique values genders = sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis = 0).unique()) #", "= \"Sheet1\") # %% dimensions created from source employee_roster #", "'append', index = False) # %% create DIM_Gender # get", "unique values currencies = sorted(employee_roster['Currency'].unique()) # create a data frame", "'hours' are already # in 'User_ID' in employee_roster users_check_2 =", "frame to DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con = connection, if_exists = 'append',", "get unique values currencies = sorted(employee_roster['Currency'].unique()) # create a data", "to DB DIM_AttributeName.to_sql('DIM_AttributeName', con = connection, if_exists = 'append', index", "jm \"\"\" # %% required libraries import numpy as np", "data frame DIM_AttributeGroup = pd.DataFrame({'id_att_group': (np.arange(len(att_group)) + 1), 'attribute_group': att_group})", "'skills' are in 'User_ID' in 'employee_roster' # we get 20134", "DIM_Currency # get unique values currencies = sorted(employee_roster['Currency'].unique()) # create", 
"DIM_Department # get unique values departments = sorted(pd.concat([employee_roster['Department'], skills['Department']], axis", "'employee_roster' # we get 7659 'True' values, meaning that NOT", "frame to DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con = connection, if_exists = 'append',", "<reponame>canovasjm/InterviewProject_JuanCanovas #!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created", "= 'append', index = False) # %% dimensions created from", "employee roster data employee_roster = pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name = 'Sheet1') #", "# create a data frame DIM_Currency = pd.DataFrame({'id_currency': (np.arange(len(currencies)) +", "'skills' are already # in 'User_ID' in employee_roster users_check_1 =", "DIM_Department.to_sql('DIM_Department', con = connection, if_exists = 'append', index = False)", "dimensions created from source skills # %% create DIM_AttributeGroup #", "that NOT all 'UserId' in 'hours' are already # in", "data frame DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) + 1), 'attribute_sub_group': att_sub_group})", "# send data frame to DB users_final.to_sql('DIM_User', con = connection,", "get unique values departments = sorted(pd.concat([employee_roster['Department'], skills['Department']], axis = 0).unique())", "'UserId' values in 'skills' are in 'User_ID' in 'employee_roster' #", "dimension tables # read employee roster data employee_roster = pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\",", "pd.read_excel(\"datasources/hours.xlsx\", sheet_name = \"Sheet1\") # %% dimensions created from source", "DIM_Currency.to_sql('DIM_Currency', con = connection, if_exists = 'append', index = False)", "sorted(employee_roster['Currency'].unique()) # create a data frame DIM_Currency = pd.DataFrame({'id_currency': (np.arange(len(currencies))", "'fullname'}, inplace = True) # send data frame to DB", "pandas as pd from sqlalchemy import create_engine # %% 
connect", "= pd.DataFrame({'id_att_name': (np.arange(len(att_name)) + 1), 'attribute_name': att_name}) # send data", "# -*- coding: utf-8 -*- \"\"\" Created on Mon Mar", "# send data frame to DB DIM_Currency.to_sql('DIM_Currency', con = connection,", "'Email_ID', 'Fullname']] # rename columns users_final.rename(columns = {'User_ID': 'id_user', 'Email_ID':", "'gender': genders}) # send data frame to DB DIM_Gender.to_sql('DIM_Gender', con", "read hours data hours = pd.read_excel(\"datasources/hours.xlsx\", sheet_name = \"Sheet1\") #", "DIM_AttributeName = pd.DataFrame({'id_att_name': (np.arange(len(att_name)) + 1), 'attribute_name': att_name}) # send", "a data frame DIM_Gender = pd.DataFrame({'id_gender': (np.arange(len(genders)) + 1), 'gender':", "data frame DIM_AttributeName = pd.DataFrame({'id_att_name': (np.arange(len(att_name)) + 1), 'attribute_name': att_name})", "connect to DB # create connection using pymssql engine =", "= pd.merge(df_users, employee_roster, on = 'User_ID', how ='left') # select", "pymssql engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection = engine.connect() # %% read", "create connection using pymssql engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection = engine.connect()", "already # in 'User_ID' in employee_roster users_check_1 = np.isin(skills['UserId'], employee_roster['User_ID']).sum()", "rename columns users_final.rename(columns = {'User_ID': 'id_user', 'Email_ID': 'id_email', 'Fullname': 'fullname'},", "send data frame to DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con = connection, if_exists", "# get unique values users = sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']], axis", "create a data frame DIM_Currency = pd.DataFrame({'id_currency': (np.arange(len(currencies)) + 1),", "= False) # %% create DIM_Department # get unique values", "# %% create DIM_AttributeGroup # get unique values att_group =", "numpy as np import 
pandas as pd from sqlalchemy import", "pd.merge() df_users = pd.DataFrame({'User_ID': users}) # left join 'df_user' with", "= False) # %% create DIM_User # check if 'UserId'", "DB DIM_Department.to_sql('DIM_Department', con = connection, if_exists = 'append', index =", "created from source skills # %% create DIM_AttributeGroup # get", "DB DIM_Currency.to_sql('DIM_Currency', con = connection, if_exists = 'append', index =", "Group'].unique()) # create a data frame DIM_AttributeGroup = pd.DataFrame({'id_att_group': (np.arange(len(att_group))", "axis = 0).unique()) # create a data frame to use", "check if 'UserId' values in 'skills' are in 'User_ID' in", "DB # create connection using pymssql engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection", "employee_roster users_check_2 = np.isin(hours['UserId'], employee_roster['User_ID']).sum() # get unique values users", "# create a data frame DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) +", "to DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con = connection, if_exists = 'append', index", "to DB DIM_Gender.to_sql('DIM_Gender', con = connection, if_exists = 'append', index", "# get unique values currencies = sorted(employee_roster['Currency'].unique()) # create a", "sorted(skills['Attribute Name'].unique()) # create a data frame DIM_AttributeName = pd.DataFrame({'id_att_name':", "how ='left') # select only columns I need users_final =", "1), 'attribute_sub_group': att_sub_group}) # send data frame to DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup',", "'append', index = False) # %% create DIM_User # check", "'User_ID' in employee_roster users_check_1 = np.isin(skills['UserId'], employee_roster['User_ID']).sum() # check if", "frame DIM_AttributeGroup = pd.DataFrame({'id_att_group': (np.arange(len(att_group)) + 1), 'attribute_group': att_group}) #", "in 'User_ID' in 'employee_roster' # we get 7659 'True' values,", "unique values users = 
sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']], axis = 0).unique())", "all 'UserId' in 'skills' are already # in 'User_ID' in", "DB DIM_Gender.to_sql('DIM_Gender', con = connection, if_exists = 'append', index =", "= sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']], axis = 0).unique()) # create a", "'attribute_group': att_group}) # send data frame to DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con", "# %% connect to DB # create connection using pymssql", "frame DIM_AttributeName = pd.DataFrame({'id_att_name': (np.arange(len(att_name)) + 1), 'attribute_name': att_name}) #", "get unique values att_group = sorted(skills['Attribute Group'].unique()) # create a", "%% connect to DB # create connection using pymssql engine", "send data frame to DB DIM_Currency.to_sql('DIM_Currency', con = connection, if_exists", "att_group}) # send data frame to DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con =", "are already # in 'User_ID' in employee_roster users_check_1 = np.isin(skills['UserId'],", "skills['Department']], axis = 0).unique()) # create a data frame DIM_Department", "DIM_Gender # get unique values genders = sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis", "as pd from sqlalchemy import create_engine # %% connect to", "# get unique values att_name = sorted(skills['Attribute Name'].unique()) # create", "'UserId' in 'skills' are already # in 'User_ID' in employee_roster", "'attribute_sub_group': att_sub_group}) # send data frame to DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con", "create a data frame DIM_Department = pd.DataFrame({'id_department': (np.arange(len(departments)) + 1),", "DIM_Gender = pd.DataFrame({'id_gender': (np.arange(len(genders)) + 1), 'gender': genders}) # send", "DB DIM_AttributeName.to_sql('DIM_AttributeName', con = connection, if_exists = 'append', index =", "created from source employee_roster # %% create DIM_Currency # get", "unique 
values departments = sorted(pd.concat([employee_roster['Department'], skills['Department']], axis = 0).unique()) #", "a data frame to use pd.merge() df_users = pd.DataFrame({'User_ID': users})", "DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con = connection, if_exists = 'append', index =", "= sorted(skills['Attribute Sub-Group'].unique()) # create a data frame DIM_AttributeSubGroup =", "= pd.read_excel(\"datasources/skills.xlsx\", sheet_name = \"Sheet1\") # read hours data hours", "data employee_roster = pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name = 'Sheet1') # read skills", "users_final.rename(columns = {'User_ID': 'id_user', 'Email_ID': 'id_email', 'Fullname': 'fullname'}, inplace =", "# create a data frame to use pd.merge() df_users =", "= pd.DataFrame({'User_ID': users}) # left join 'df_user' with 'employee_roster' on", "in 'employee_roster' # we get 20134 'True' values, meaning that", "# check if 'UserId' values in 'hours' are in 'User_ID'", "sorted(skills['Attribute Group'].unique()) # create a data frame DIM_AttributeGroup = pd.DataFrame({'id_att_group':", "= pd.DataFrame({'id_department': (np.arange(len(departments)) + 1), 'department': departments}) # send data", "df_users = pd.DataFrame({'User_ID': users}) # left join 'df_user' with 'employee_roster'", "create DIM_AttributeSubGroup # get unique values att_sub_group = sorted(skills['Attribute Sub-Group'].unique())", "users_check_2 = np.isin(hours['UserId'], employee_roster['User_ID']).sum() # get unique values users =", "data frame to DB users_final.to_sql('DIM_User', con = connection, if_exists =", "from source employee_roster # %% create DIM_Currency # get unique", "we get 20134 'True' values, meaning that all 'UserId' in", "Mar 1 18:17:07 2021 @author: jm \"\"\" # %% required", "coding: utf-8 -*- \"\"\" Created on Mon Mar 1 18:17:07", "import create_engine # %% connect to DB # create connection", "a data frame DIM_Department = pd.DataFrame({'id_department': 
(np.arange(len(departments)) + 1), 'department':", "np.isin(skills['UserId'], employee_roster['User_ID']).sum() # check if 'UserId' values in 'hours' are", "%% create DIM_User # check if 'UserId' values in 'skills'", "Created on Mon Mar 1 18:17:07 2021 @author: jm \"\"\"", "if_exists = 'append', index = False) # %% dimensions created", "pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name = 'Sheet1') # read skills data skills =", "1), 'currency': currencies}) # send data frame to DB DIM_Currency.to_sql('DIM_Currency',", "# read employee roster data employee_roster = pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name =", "# get unique values genders = sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis =", "users_final[['User_ID', 'Email_ID', 'Fullname']] # rename columns users_final.rename(columns = {'User_ID': 'id_user',", "if 'UserId' values in 'hours' are in 'User_ID' in 'employee_roster'", "are in 'User_ID' in 'employee_roster' # we get 20134 'True'", "att_name = sorted(skills['Attribute Name'].unique()) # create a data frame DIM_AttributeName", "in 'skills' are in 'User_ID' in 'employee_roster' # we get", "meaning that all 'UserId' in 'skills' are already # in", "meaning that NOT all 'UserId' in 'hours' are already #", "DIM_User # check if 'UserId' values in 'skills' are in", "values att_sub_group = sorted(skills['Attribute Sub-Group'].unique()) # create a data frame", "sheet_name = \"Sheet1\") # read hours data hours = pd.read_excel(\"datasources/hours.xlsx\",", "# send data frame to DB DIM_Department.to_sql('DIM_Department', con = connection,", "if_exists = 'append', index = False) # %% create DIM_Gender", "-*- coding: utf-8 -*- \"\"\" Created on Mon Mar 1", "= create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection = engine.connect() # %% read data sets", "on = 'User_ID', how ='left') # select only columns I", "'append', index = False) # %% create DIM_Department # get", "'id_user', 
'Email_ID': 'id_email', 'Fullname': 'fullname'}, inplace = True) # send", "= \"Sheet1\") # read hours data hours = pd.read_excel(\"datasources/hours.xlsx\", sheet_name", "1), 'attribute_name': att_name}) # send data frame to DB DIM_AttributeName.to_sql('DIM_AttributeName',", "%% create DIM_AttributeGroup # get unique values att_group = sorted(skills['Attribute", "to DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con = connection, if_exists = 'append', index", "(np.arange(len(att_sub_group)) + 1), 'attribute_sub_group': att_sub_group}) # send data frame to", "1), 'department': departments}) # send data frame to DB DIM_Department.to_sql('DIM_Department',", "%% dimensions created from source skills # %% create DIM_AttributeGroup", "DIM_Gender.to_sql('DIM_Gender', con = connection, if_exists = 'append', index = False)", "= users_final[['User_ID', 'Email_ID', 'Fullname']] # rename columns users_final.rename(columns = {'User_ID':", "# create a data frame DIM_Department = pd.DataFrame({'id_department': (np.arange(len(departments)) +", "\"Sheet1\") # %% dimensions created from source employee_roster # %%", "= pd.read_excel(\"datasources/hours.xlsx\", sheet_name = \"Sheet1\") # %% dimensions created from", "frame DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) + 1), 'attribute_sub_group': att_sub_group}) #", "create DIM_Gender # get unique values genders = sorted(pd.concat([employee_roster['Gender'], skills['Gender']],", "values in 'skills' are in 'User_ID' in 'employee_roster' # we", "con = connection, if_exists = 'append', index = False) #", "# get unique values departments = sorted(pd.concat([employee_roster['Department'], skills['Department']], axis =", "data frame to DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con = connection, if_exists =", "DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con = connection, if_exists = 'append', index = False)", "on 'UserID' users_final = pd.merge(df_users, employee_roster, on = 
'User_ID', how", "False) # %% create DIM_Department # get unique values departments", "create DIM_AttributeGroup # get unique values att_group = sorted(skills['Attribute Group'].unique())", "(np.arange(len(genders)) + 1), 'gender': genders}) # send data frame to", "columns I need users_final = users_final[['User_ID', 'Email_ID', 'Fullname']] # rename", "unique values att_name = sorted(skills['Attribute Name'].unique()) # create a data", "index = False) # %% create DIM_AttributeSubGroup # get unique", "a data frame DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) + 1), 'attribute_sub_group':", "# send data frame to DB DIM_AttributeName.to_sql('DIM_AttributeName', con = connection,", "1), 'gender': genders}) # send data frame to DB DIM_Gender.to_sql('DIM_Gender',", "left join 'df_user' with 'employee_roster' on 'UserID' users_final = pd.merge(df_users,", "connection = engine.connect() # %% read data sets from where", "DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) + 1), 'attribute_sub_group': att_sub_group}) # send", "create a data frame DIM_AttributeGroup = pd.DataFrame({'id_att_group': (np.arange(len(att_group)) + 1),", "I will build the dimension tables # read employee roster", "'True' values, meaning that NOT all 'UserId' in 'hours' are", "= False) # %% create DIM_Gender # get unique values", "# send data frame to DB DIM_Gender.to_sql('DIM_Gender', con = connection,", "get unique values genders = sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis = 0).unique())", "sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis = 0).unique()) # create a data frame", "'UserId' in 'hours' are already # in 'User_ID' in employee_roster", "pd.DataFrame({'id_gender': (np.arange(len(genders)) + 1), 'gender': genders}) # send data frame", "frame to DB DIM_Department.to_sql('DIM_Department', con = connection, if_exists = 'append',", "if 'UserId' values in 'skills' are 
in 'User_ID' in 'employee_roster'", "employee_roster, on = 'User_ID', how ='left') # select only columns", "to DB users_final.to_sql('DIM_User', con = connection, if_exists = 'append', index", "= 0).unique()) # create a data frame DIM_Department = pd.DataFrame({'id_department':", "= False) # %% create DIM_AttributeName # get unique values", "create a data frame DIM_AttributeName = pd.DataFrame({'id_att_name': (np.arange(len(att_name)) + 1),", "we get 7659 'True' values, meaning that NOT all 'UserId'", "values att_group = sorted(skills['Attribute Group'].unique()) # create a data frame", "# get unique values att_group = sorted(skills['Attribute Group'].unique()) # create", "pd.read_excel(\"datasources/skills.xlsx\", sheet_name = \"Sheet1\") # read hours data hours =", "# create a data frame DIM_AttributeName = pd.DataFrame({'id_att_name': (np.arange(len(att_name)) +", "='left') # select only columns I need users_final = users_final[['User_ID',", "frame DIM_Department = pd.DataFrame({'id_department': (np.arange(len(departments)) + 1), 'department': departments}) #", "= 'append', index = False) # %% create DIM_Department #", "DIM_Department = pd.DataFrame({'id_department': (np.arange(len(departments)) + 1), 'department': departments}) # send", "data frame to DB DIM_Currency.to_sql('DIM_Currency', con = connection, if_exists =", "check if 'UserId' values in 'hours' are in 'User_ID' in", "NOT all 'UserId' in 'hours' are already # in 'User_ID'", "\"\"\" # %% required libraries import numpy as np import", "unique values att_sub_group = sorted(skills['Attribute Sub-Group'].unique()) # create a data", "+ 1), 'attribute_name': att_name}) # send data frame to DB", "currencies = sorted(employee_roster['Currency'].unique()) # create a data frame DIM_Currency =", "sheet_name = \"Sheet1\") # %% dimensions created from source employee_roster", "'UserId' values in 'hours' are in 'User_ID' in 'employee_roster' #", "+ 1), 'currency': currencies}) # send data frame to DB", "to DB # 
create connection using pymssql engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga')", "need users_final = users_final[['User_ID', 'Email_ID', 'Fullname']] # rename columns users_final.rename(columns", "= 'append', index = False) # %% create DIM_AttributeName #", "'hours' are in 'User_ID' in 'employee_roster' # we get 7659", "index = False) # %% create DIM_User # check if", "'UserID' users_final = pd.merge(df_users, employee_roster, on = 'User_ID', how ='left')", "in 'hours' are already # in 'User_ID' in employee_roster users_check_2", "will build the dimension tables # read employee roster data", "1 18:17:07 2021 @author: jm \"\"\" # %% required libraries", "DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con = connection, if_exists = 'append', index =", "a data frame DIM_AttributeName = pd.DataFrame({'id_att_name': (np.arange(len(att_name)) + 1), 'attribute_name':", "only columns I need users_final = users_final[['User_ID', 'Email_ID', 'Fullname']] #", "\"\"\" Created on Mon Mar 1 18:17:07 2021 @author: jm", "a data frame DIM_Currency = pd.DataFrame({'id_currency': (np.arange(len(currencies)) + 1), 'currency':", "False) # %% create DIM_AttributeName # get unique values att_name", "(np.arange(len(att_group)) + 1), 'attribute_group': att_group}) # send data frame to", "skills['Gender']], axis = 0).unique()) # create a data frame DIM_Gender", "= np.isin(hours['UserId'], employee_roster['User_ID']).sum() # get unique values users = sorted(pd.concat([employee_roster['User_ID'],", "values departments = sorted(pd.concat([employee_roster['Department'], skills['Department']], axis = 0).unique()) # create", "I need users_final = users_final[['User_ID', 'Email_ID', 'Fullname']] # rename columns", "# read skills data skills = pd.read_excel(\"datasources/skills.xlsx\", sheet_name = \"Sheet1\")", "values att_name = sorted(skills['Attribute Name'].unique()) # create a data frame", "0).unique()) # create a data frame DIM_Department = 
pd.DataFrame({'id_department': (np.arange(len(departments))", "data frame to DB DIM_AttributeName.to_sql('DIM_AttributeName', con = connection, if_exists =", "att_sub_group = sorted(skills['Attribute Sub-Group'].unique()) # create a data frame DIM_AttributeSubGroup", "to DB DIM_Department.to_sql('DIM_Department', con = connection, if_exists = 'append', index", "skills # %% create DIM_AttributeGroup # get unique values att_group", "%% required libraries import numpy as np import pandas as", "= np.isin(skills['UserId'], employee_roster['User_ID']).sum() # check if 'UserId' values in 'hours'", "# we get 7659 'True' values, meaning that NOT all", "Sub-Group'].unique()) # create a data frame DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group))", "18:17:07 2021 @author: jm \"\"\" # %% required libraries import", "= 'User_ID', how ='left') # select only columns I need", "%% create DIM_AttributeName # get unique values att_name = sorted(skills['Attribute", "= pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) + 1), 'attribute_sub_group': att_sub_group}) # send data", "att_sub_group}) # send data frame to DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con =", "= 'append', index = False) # %% create DIM_AttributeSubGroup #", "= 'append', index = False) # %% create DIM_Gender #", "as np import pandas as pd from sqlalchemy import create_engine", "values users = sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']], axis = 0).unique()) #", "with 'employee_roster' on 'UserID' users_final = pd.merge(df_users, employee_roster, on =", "False) # %% dimensions created from source skills # %%", "currencies}) # send data frame to DB DIM_Currency.to_sql('DIM_Currency', con =", "are already # in 'User_ID' in employee_roster users_check_2 = np.isin(hours['UserId'],", "Name'].unique()) # create a data frame DIM_AttributeName = pd.DataFrame({'id_att_name': (np.arange(len(att_name))", "# create a data 
frame DIM_Gender = pd.DataFrame({'id_gender': (np.arange(len(genders)) +", "20134 'True' values, meaning that all 'UserId' in 'skills' are", "%% dimensions created from source employee_roster # %% create DIM_Currency", "frame to DB users_final.to_sql('DIM_User', con = connection, if_exists = 'append',", "False) # %% create DIM_User # check if 'UserId' values", "get unique values users = sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']], axis =", "'employee_roster' # we get 20134 'True' values, meaning that all", "create DIM_AttributeName # get unique values att_name = sorted(skills['Attribute Name'].unique())", "create a data frame to use pd.merge() df_users = pd.DataFrame({'User_ID':", "hours = pd.read_excel(\"datasources/hours.xlsx\", sheet_name = \"Sheet1\") # %% dimensions created", "users}) # left join 'df_user' with 'employee_roster' on 'UserID' users_final", "frame to DB DIM_Currency.to_sql('DIM_Currency', con = connection, if_exists = 'append',", "libraries import numpy as np import pandas as pd from", "send data frame to DB DIM_Gender.to_sql('DIM_Gender', con = connection, if_exists", "# %% dimensions created from source employee_roster # %% create", "DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con = connection, if_exists = 'append', index = False)", "= pd.DataFrame({'id_att_group': (np.arange(len(att_group)) + 1), 'attribute_group': att_group}) # send data", "# read hours data hours = pd.read_excel(\"datasources/hours.xlsx\", sheet_name = \"Sheet1\")", "departments = sorted(pd.concat([employee_roster['Department'], skills['Department']], axis = 0).unique()) # create a", "values currencies = sorted(employee_roster['Currency'].unique()) # create a data frame DIM_Currency", "+ 1), 'attribute_sub_group': att_sub_group}) # send data frame to DB", "in 'employee_roster' # we get 7659 'True' values, meaning that", "users_check_1 = np.isin(skills['UserId'], employee_roster['User_ID']).sum() # check if 'UserId' values in", 
"= sorted(skills['Attribute Group'].unique()) # create a data frame DIM_AttributeGroup =", "join 'df_user' with 'employee_roster' on 'UserID' users_final = pd.merge(df_users, employee_roster,", "users_final = pd.merge(df_users, employee_roster, on = 'User_ID', how ='left') #", "= 'append', index = False) # %% create DIM_User #", "%% create DIM_Currency # get unique values currencies = sorted(employee_roster['Currency'].unique())", "True) # send data frame to DB users_final.to_sql('DIM_User', con =", "data frame to DB DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con = connection, if_exists =", "utf-8 -*- \"\"\" Created on Mon Mar 1 18:17:07 2021", "pd.DataFrame({'id_currency': (np.arange(len(currencies)) + 1), 'currency': currencies}) # send data frame", "'append', index = False) # %% create DIM_AttributeName # get", "select only columns I need users_final = users_final[['User_ID', 'Email_ID', 'Fullname']]", "from sqlalchemy import create_engine # %% connect to DB #", "'append', index = False) # %% dimensions created from source", "engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection = engine.connect() # %% read data", "hours data hours = pd.read_excel(\"datasources/hours.xlsx\", sheet_name = \"Sheet1\") # %%", "= sorted(skills['Attribute Name'].unique()) # create a data frame DIM_AttributeName =", "to DB DIM_Currency.to_sql('DIM_Currency', con = connection, if_exists = 'append', index", "# %% create DIM_Department # get unique values departments =", "sets from where I will build the dimension tables #", "# we get 20134 'True' values, meaning that all 'UserId'", "# %% create DIM_AttributeSubGroup # get unique values att_sub_group =", "on Mon Mar 1 18:17:07 2021 @author: jm \"\"\" #", "all 'UserId' in 'hours' are already # in 'User_ID' in", "DIM_AttributeGroup # get unique values att_group = sorted(skills['Attribute Group'].unique()) #", "data frame DIM_Gender = pd.DataFrame({'id_gender': (np.arange(len(genders)) + 1), 
'gender': genders})", "1), 'attribute_group': att_group}) # send data frame to DB DIM_AttributeGroup.to_sql('DIM_AttributeGroup',", "employee_roster['User_ID']).sum() # check if 'UserId' values in 'hours' are in", "\"Sheet1\") # read hours data hours = pd.read_excel(\"datasources/hours.xlsx\", sheet_name =", "= False) # %% create DIM_AttributeSubGroup # get unique values", "-*- \"\"\" Created on Mon Mar 1 18:17:07 2021 @author:", "# create connection using pymssql engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection =", "import pandas as pd from sqlalchemy import create_engine # %%", "sqlalchemy import create_engine # %% connect to DB # create", "= {'User_ID': 'id_user', 'Email_ID': 'id_email', 'Fullname': 'fullname'}, inplace = True)", "pd.DataFrame({'id_att_group': (np.arange(len(att_group)) + 1), 'attribute_group': att_group}) # send data frame", "'True' values, meaning that all 'UserId' in 'skills' are already", "from where I will build the dimension tables # read", "0).unique()) # create a data frame to use pd.merge() df_users", "in 'skills' are already # in 'User_ID' in employee_roster users_check_1", "a data frame DIM_AttributeGroup = pd.DataFrame({'id_att_group': (np.arange(len(att_group)) + 1), 'attribute_group':", "tables # read employee roster data employee_roster = pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name", "# %% read data sets from where I will build", "'append', index = False) # %% create DIM_AttributeSubGroup # get", "'id_email', 'Fullname': 'fullname'}, inplace = True) # send data frame", "False) # %% create DIM_Gender # get unique values genders", "using pymssql engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection = engine.connect() # %%", "python3 # -*- coding: utf-8 -*- \"\"\" Created on Mon", "engine.connect() # %% read data sets from where I will", "%% create DIM_AttributeSubGroup # get unique values att_sub_group = sorted(skills['Attribute", 
"hours['UserId']], axis = 0).unique()) # create a data frame to", "frame to DB DIM_AttributeName.to_sql('DIM_AttributeName', con = connection, if_exists = 'append',", "inplace = True) # send data frame to DB users_final.to_sql('DIM_User',", "get unique values att_sub_group = sorted(skills['Attribute Sub-Group'].unique()) # create a", "genders = sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis = 0).unique()) # create a", "source employee_roster # %% create DIM_Currency # get unique values", "= sorted(employee_roster['Currency'].unique()) # create a data frame DIM_Currency = pd.DataFrame({'id_currency':", "%% create DIM_Gender # get unique values genders = sorted(pd.concat([employee_roster['Gender'],", "in 'User_ID' in employee_roster users_check_2 = np.isin(hours['UserId'], employee_roster['User_ID']).sum() # get", "values in 'hours' are in 'User_ID' in 'employee_roster' # we", "att_group = sorted(skills['Attribute Group'].unique()) # create a data frame DIM_AttributeGroup", "create DIM_Currency # get unique values currencies = sorted(employee_roster['Currency'].unique()) #", "if_exists = 'append', index = False) # %% create DIM_User", "users = sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']], axis = 0).unique()) # create", "'currency': currencies}) # send data frame to DB DIM_Currency.to_sql('DIM_Currency', con", "data frame DIM_Department = pd.DataFrame({'id_department': (np.arange(len(departments)) + 1), 'department': departments})", "@author: jm \"\"\" # %% required libraries import numpy as", "users_final = users_final[['User_ID', 'Email_ID', 'Fullname']] # rename columns users_final.rename(columns =", "np import pandas as pd from sqlalchemy import create_engine #", "connection, if_exists = 'append', index = False) # %% create", "= engine.connect() # %% read data sets from where I", "= sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis = 0).unique()) # create a data", "unique values att_group 
= sorted(skills['Attribute Group'].unique()) # create a data", "Mon Mar 1 18:17:07 2021 @author: jm \"\"\" # %%", "'employee_roster' on 'UserID' users_final = pd.merge(df_users, employee_roster, on = 'User_ID',", "send data frame to DB users_final.to_sql('DIM_User', con = connection, if_exists", "data hours = pd.read_excel(\"datasources/hours.xlsx\", sheet_name = \"Sheet1\") # %% dimensions", "columns users_final.rename(columns = {'User_ID': 'id_user', 'Email_ID': 'id_email', 'Fullname': 'fullname'}, inplace", "skills data skills = pd.read_excel(\"datasources/skills.xlsx\", sheet_name = \"Sheet1\") # read", "+ 1), 'gender': genders}) # send data frame to DB", "index = False) # %% create DIM_Gender # get unique", "read skills data skills = pd.read_excel(\"datasources/skills.xlsx\", sheet_name = \"Sheet1\") #", "send data frame to DB DIM_Department.to_sql('DIM_Department', con = connection, if_exists", "data skills = pd.read_excel(\"datasources/skills.xlsx\", sheet_name = \"Sheet1\") # read hours", "from source skills # %% create DIM_AttributeGroup # get unique", "(np.arange(len(departments)) + 1), 'department': departments}) # send data frame to", "'Sheet1') # read skills data skills = pd.read_excel(\"datasources/skills.xlsx\", sheet_name =", "sorted(pd.concat([employee_roster['Department'], skills['Department']], axis = 0).unique()) # create a data frame", "= True) # send data frame to DB users_final.to_sql('DIM_User', con", "# %% create DIM_AttributeName # get unique values att_name =", "sorted(skills['Attribute Sub-Group'].unique()) # create a data frame DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group':", "DIM_AttributeGroup = pd.DataFrame({'id_att_group': (np.arange(len(att_group)) + 1), 'attribute_group': att_group}) # send", "pd.DataFrame({'id_att_name': (np.arange(len(att_name)) + 1), 'attribute_name': att_name}) # send data frame", "sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']], axis = 0).unique()) # create a data", 
"create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga') connection = engine.connect() # %% read data sets from", "get 20134 'True' values, meaning that all 'UserId' in 'skills'", "'User_ID' in employee_roster users_check_2 = np.isin(hours['UserId'], employee_roster['User_ID']).sum() # get unique", "= sorted(pd.concat([employee_roster['Department'], skills['Department']], axis = 0).unique()) # create a data", "roster data employee_roster = pd.read_excel(\"datasources/Employee_Roster_Data.xlsx\", sheet_name = 'Sheet1') # read", "data frame to DB DIM_Gender.to_sql('DIM_Gender', con = connection, if_exists =", "DIM_AttributeSubGroup # get unique values att_sub_group = sorted(skills['Attribute Sub-Group'].unique()) #", "'Email_ID': 'id_email', 'Fullname': 'fullname'}, inplace = True) # send data", "values, meaning that NOT all 'UserId' in 'hours' are already", "if_exists = 'append', index = False) # %% create DIM_AttributeName", "frame DIM_Gender = pd.DataFrame({'id_gender': (np.arange(len(genders)) + 1), 'gender': genders}) #", "index = False) # %% create DIM_AttributeName # get unique", "get unique values att_name = sorted(skills['Attribute Name'].unique()) # create a", "= pd.DataFrame({'id_gender': (np.arange(len(genders)) + 1), 'gender': genders}) # send data", "in 'User_ID' in employee_roster users_check_1 = np.isin(skills['UserId'], employee_roster['User_ID']).sum() # check", "'department': departments}) # send data frame to DB DIM_Department.to_sql('DIM_Department', con", "index = False) # %% create DIM_Department # get unique", "# in 'User_ID' in employee_roster users_check_1 = np.isin(skills['UserId'], employee_roster['User_ID']).sum() #", "DIM_Currency = pd.DataFrame({'id_currency': (np.arange(len(currencies)) + 1), 'currency': currencies}) # send", "that all 'UserId' in 'skills' are already # in 'User_ID'", "create_engine # %% connect to DB # create connection using", "axis = 0).unique()) # create a data frame DIM_Department =", "2021 @author: jm 
\"\"\" # %% required libraries import numpy", "data frame to use pd.merge() df_users = pd.DataFrame({'User_ID': users}) #", "connection, if_exists = 'append', index = False) # %% dimensions", "'User_ID' in 'employee_roster' # we get 20134 'True' values, meaning", "= 'Sheet1') # read skills data skills = pd.read_excel(\"datasources/skills.xlsx\", sheet_name", "# create a data frame DIM_AttributeGroup = pd.DataFrame({'id_att_group': (np.arange(len(att_group)) +", "get 7659 'True' values, meaning that NOT all 'UserId' in", "pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) + 1), 'attribute_sub_group': att_sub_group}) # send data frame", "DIM_AttributeName # get unique values att_name = sorted(skills['Attribute Name'].unique()) #", "already # in 'User_ID' in employee_roster users_check_2 = np.isin(hours['UserId'], employee_roster['User_ID']).sum()", "the dimension tables # read employee roster data employee_roster =", "data sets from where I will build the dimension tables", "build the dimension tables # read employee roster data employee_roster", "departments}) # send data frame to DB DIM_Department.to_sql('DIM_Department', con =", "send data frame to DB DIM_AttributeName.to_sql('DIM_AttributeName', con = connection, if_exists", "%% create DIM_Department # get unique values departments = sorted(pd.concat([employee_roster['Department'],", "DB users_final.to_sql('DIM_User', con = connection, if_exists = 'append', index =", "in employee_roster users_check_2 = np.isin(hours['UserId'], employee_roster['User_ID']).sum() # get unique values" ]
[ "`aSHish#1198`. Have a nice stay in the server :)\" await", "name=f\"{heart} **No advertisements**\", value=f\"{arrow} We do not tolerate any kind", "should be executed here.\\n{arrow} <#876798696078065694> You can give suggestions for", "__CHANNELS__\", value=f\"{arrow} <#877030933847490691> Read the rules here.\\n{arrow} <#877031867440832574> Channel for", "being an **early supporter**!! If you need any kind of", "Guidelines**\", value=f\"{arrow} You can find them here: https://discordapp.com/guidelines\", inline=False )", "have this role.\\n{arrow} <@&876804164661944340> All other users who join this", "you need any kind of help or support just ping", "inline=False ) emb.add_field( name=f\"{heart} **DECISIONS AND ISSUES**\", value = f\"{arrow}", "in the server :)\" await member.send(desc) else: return def setup(client):", "and authority after owner.\\n{arrow} <@&876818242058997791> Moderators of the server meant", "improving Kanna Chan here.\\n{arrow} <#876798720254029864> You can report BUGS here", "any kind of advertisements, whether it be for other communities", "or picture if the staff deems them inappropriate.\", inline=False )", "pornographic/adult/other NSFW material**\", value=f\"{arrow} This is a community server and", "Discord Community Guidelines**\", value=f\"{arrow} You can find them here: https://discordapp.com/guidelines\",", "and Mods will Mute/Kick/Ban per discretion. 
If you feel mistreated", "CAN PING ANY STAFF MEMBER OR DEVELOPER WHILE REPORTING BUG", "inline=False ) emb.add_field( name=f\"{heart} **No pornographic/adult/other NSFW material**\", value=f\"{arrow} This", "Moderators of the server meant to moderate the chat and", "them.***\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.Cog.listener()", "them here: https://discordapp.com/guidelines\", inline=False ) emb.add_field( name=f\"{heart} **VOICE CHANNELS**\", value=f\"{arrow}", "discord.Embed(title=f\"{kbored} RULES {kbored}\", color=0xfc74c6) emb.add_field( name=f\"{heart} **Be respectful**\", value=f\"You must", "default. They have image and embed perms by deault.\\n{arrow} **PS:", "await ctx.send(embed=emb) @commands.command() @commands.is_owner() async def rule(self, ctx): kana =", "name=f\"{heart} **No pornographic/adult/other NSFW material**\", value=f\"{arrow} This is a community", "and not meant to share this kind of material.\", inline=False", "help or support just ping any staff member or DM", "Server and have the highest power and authority after owner.\\n{arrow}", "is a community server and not meant to share this", "done at any time without notice, it is your responsibility", "DM `aSHish#1198`. 
Have a nice stay in the server :)\"", "await member.add_roles(member_role) desc = f\"{member.name} Thanks for joining Kanna's Server.", "(Video/Art)\", inline=False ) emb.add_field( name=f\"{heart} **No offensive names and profile", "member.guild.get_role(876804164661944340) await member.add_roles(member_role) desc = f\"{member.name} Thanks for joining Kanna's", "meant for having fun, chilling and spending time with others.\\n{arrow}", "ctx.send(embed=emb) @commands.Cog.listener() async def on_member_join(self, member): if member.guild.id == 876798564704084008:", "for having fun, chilling and spending time with others.\\n{arrow} This", "the Discord Community Guidelines**\", value=f\"{arrow} You can find them here:", "f\"{arrow} **DRAGON LOLI'S HOME** is the official Server of the", "You can give suggestions for improving Kanna Chan here.\\n{arrow} <#876798720254029864>", "server meant to moderate the chat and maintain a positive", "spending time with others.\\n{arrow} This server has cute emotes and", "will resolve the issue.***\", inline=False ) emb.add_field( name=f\"{heart} **CHANGES**\", value", "name=f\"{kwee} __ROLES__\", value=f\"{arrow} <@&876800883441156138> The highest role supposed to be", "a minimum. 
However, any derogatory language towards any user is", "name=f\"{heart} **Server Raiding**\", value=f\"{arrow} Raiding or mentions of raiding are", "you feel mistreated DM an Admin and we will resolve", "value=f\"{arrow} Do not join voice chat channels without permission of", "executed here.\\n{arrow} <#876798696078065694> You can give suggestions for improving Kanna", "actual value (Video/Art)\", inline=False ) emb.add_field( name=f\"{heart} **No offensive names", "mentions of raiding are not allowed.\", inline=False ) emb.add_field( name=f\"{heart}", "might be done at any time without notice, it is", "kind of material.\", inline=False ) emb.add_field( name=f\"{heart} **No advertisements**\", value=f\"{arrow}", "member.send(desc) else: return def setup(client): client.add_cog(Server(client)) print(\">> Server Utility loaded\")", "**Follow the Discord Community Guidelines**\", value=f\"{arrow} You can find them", "for them.***\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb)", "discord.File(\"./images/rules.png\") await ctx.send(file=rule_file) emb = discord.Embed(title=f\"{kbored} RULES {kbored}\", color=0xfc74c6) emb.add_field(", "any kind of help or support just ping any staff", "emb.add_field( name=f\"{heart} **Be respectful**\", value=f\"You must respect all users, regardless", "If you need any kind of help or support just", "users who join this server get this role by default.", "def rule(self, ctx): kana = self.client.get_user(self.kana_id) rule_file = discord.File(\"./images/rules.png\") await", "It's a friendly community meant for having fun, chilling and", "f\"{arrow} ***The Admins and Mods will Mute/Kick/Ban per discretion. 
If", "maintain a positive environment in community.\\n{arrow} <@&876801038420701196> Developer(s) of <NAME>", "time with others.\\n{arrow} This server has cute emotes and a", "query use this channel.\\n{arrow} **P.S: YOU CAN PING ANY STAFF", "or query use this channel.\\n{arrow} **P.S: YOU CAN PING ANY", "value=f\"{arrow} This is a community server and not meant to", "this role by default. They have image and embed perms", "an **early supporter**!! If you need any kind of help", "any other support or query use this channel.\\n{arrow} **P.S: YOU", "value=f\"{arrow} Raiding or mentions of raiding are not allowed.\", inline=False", "deault.\\n{arrow} **PS: APART FROM THESE SELF-ROLES ARE ALSO AVAIALBLE FOR", "Treat others the way you want to be treated.\", inline=False", "**No offensive names and profile pictures**\", value=f\"{arrow} You will be", "profanity should be kept to a minimum. However, any derogatory", "Don't send a lot of small messages right after each", "right after each other. Do not disrupt chat by spamming.\",", "Thanks for being an **early supporter**!! If you need any", "**early supporter**!! If you need any kind of help or", "stay in the server :)\" await member.send(desc) else: return def", "value=f\"{arrow} You will be asked to change your name or", "of profanity should be kept to a minimum. 
However, any", "use this channel.\\n{arrow} **P.S: YOU CAN PING ANY STAFF MEMBER", "the staff deems them inappropriate.\", inline=False ) emb.add_field( name=f\"{heart} **Server", "with others.\\n{arrow} This server has cute emotes and a lot", "stay tuned!\", color=0xfc74c6) emb.add_field( name=f\"{kwee} __ROLES__\", value=f\"{arrow} <@&876800883441156138> The highest", "THESE SELF-ROLES ARE ALSO AVAIALBLE FOR MEMBERS.**\", inline=False ) emb.add_field(", "staff deems them inappropriate.\", inline=False ) emb.add_field( name=f\"{heart} **Server Raiding**\",", ") emb.add_field( name=f\"{heart} **Server Raiding**\", value=f\"{arrow} Raiding or mentions of", "= \"<a:right:877425183839891496>\" kwee = \"<:kannawee:877036162122924072>\" kdance = \"<a:kanna_dance:877038778798207016>\" kbored =", "role.\\n{arrow} <@&876804164661944340> All other users who join this server get", "import commands arrow = \"<a:right:877425183839891496>\" kwee = \"<:kannawee:877036162122924072>\" kdance =", "emb.add_field( name=f\"{heart} **No offensive names and profile pictures**\", value=f\"{arrow} You", "post your content in the media channel if it is", "communities or streams. You can post your content in the", "not tolerate any kind of advertisements, whether it be for", "user is prohibited.\", inline=False ) emb.add_field( name=f\"{heart} **No spamming**\", value=f\"{arrow}", "fun events are about to be done here! 
So, stay", "= discord.File(\"./images/rules.png\") await ctx.send(file=rule_file) emb = discord.Embed(title=f\"{kbored} RULES {kbored}\", color=0xfc74c6)", "offensive names and profile pictures**\", value=f\"{arrow} You will be asked", "Read the rules here.\\n{arrow} <#877031867440832574> Channel for grabbing self-roles.\\n{arrow} <#876798564704084011>", "Chan.\\n{arrow} <@&876817811396263946> Admins of the Server and have the highest", "Server(commands.Cog): def __init__(self, client): self.client = client self.kana_id = 857835279259664403", "***Your presence in this server implies accepting these rules, including", "__init__(self, client): self.client = client self.kana_id = 857835279259664403 @commands.command() @commands.is_owner()", "in Kanna Chan.\\n{arrow} <#876798750876651530> For any other support or query", "media channel if it is relevant and provides actual value", "here.\\n{arrow} <#876798720254029864> You can report BUGS here if you find", "rule_file = discord.File(\"./images/rules.png\") await ctx.send(file=rule_file) emb = discord.Embed(title=f\"{kbored} RULES {kbored}\",", "member_role = member.guild.get_role(876804164661944340) await member.add_roles(member_role) desc = f\"{member.name} Thanks for", "@commands.is_owner() async def sabout(self, ctx): kana = self.client.get_user(self.kana_id) about_file =", "and disallowed.\", inline=False ) emb.add_field( name=f\"{heart} **Follow the Discord Community", "Admins of the Server and have the highest power and", "other support or query use this channel.\\n{arrow} **P.S: YOU CAN", "**PS: APART FROM THESE SELF-ROLES ARE ALSO AVAIALBLE FOR MEMBERS.**\",", "in the media channel if it is relevant and provides", "You will be asked to change your name or picture", "inappropriate.\", inline=False ) emb.add_field( name=f\"{heart} **Server Raiding**\", value=f\"{arrow} Raiding or", "DoX, abuse, and other malicious threats are absolutely prohibited and", "role supposed to be only for Kanna Chan.\\n{arrow} 
<@&876817811396263946> Admins", "this server get this role by default. They have image", "will be asked to change your name or picture if", "others.\\n{arrow} This server has cute emotes and a lot of", "QUERY.**\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.command()", "= self.client.get_user(self.kana_id) rule_file = discord.File(\"./images/rules.png\") await ctx.send(file=rule_file) emb = discord.Embed(title=f\"{kbored}", "the chat and maintain a positive environment in community.\\n{arrow} <@&876801038420701196>", "implies accepting these rules, including all further changes. These changes", "color=0xfc74c6) emb.add_field( name=f\"{kwee} __ROLES__\", value=f\"{arrow} <@&876800883441156138> The highest role supposed", "positive environment in community.\\n{arrow} <@&876801038420701196> Developer(s) of <NAME> have this", "icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.command() @commands.is_owner() async def rule(self, ctx):", "Bot Commands should be executed here.\\n{arrow} <#876798696078065694> You can give", "chat and maintain a positive environment in community.\\n{arrow} <@&876801038420701196> Developer(s)", "= \"<a:kanna_dance:877038778798207016>\" kbored = \"<:kanna_bored:877036162827583538>\" ksmug = \"<:kanna_smug:877038777896427560>\" heart =", "chat for the server.\\n{arrow} <#876798809819189249> Bot Commands should be executed", "channel if it is relevant and provides actual value (Video/Art)\",", "For any other support or query use this channel.\\n{arrow} **P.S:", "Threats to other users of DDoS, Death, DoX, abuse, and", "@commands.Cog.listener() async def on_member_join(self, member): if member.guild.id == 876798564704084008: if", "emb.add_field( name=f\"{heart} **Follow the Discord Community Guidelines**\", value=f\"{arrow} You can", "want to be treated.\", inline=False ) emb.add_field( name=f\"{heart} **No Inappropriate", "kind of help or support just ping any staff 
member", "nice stay in the server :)\" await member.send(desc) else: return", "value=f\"{arrow} You can find them here: https://discordapp.com/guidelines\", inline=False ) emb.add_field(", "SERVER {kdance}\",description = f\"{arrow} **DRAGON LOLI'S HOME** is the official", "member.guild.id == 876798564704084008: if member.bot: return else: member_role = member.guild.get_role(876804164661944340)", "class Server(commands.Cog): def __init__(self, client): self.client = client self.kana_id =", "tuned!\", color=0xfc74c6) emb.add_field( name=f\"{kwee} __ROLES__\", value=f\"{arrow} <@&876800883441156138> The highest role", "BUGS here if you find any in Kanna Chan.\\n{arrow} <#876798750876651530>", "RULES {kbored}\", color=0xfc74c6) emb.add_field( name=f\"{heart} **Be respectful**\", value=f\"You must respect", "Do not join voice chat channels without permission of the", "client): self.client = client self.kana_id = 857835279259664403 @commands.command() @commands.is_owner() async", "These changes might be done at any time without notice,", ") emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.Cog.listener() async def", "respect all users, regardless of your liking towards them. Treat", "highest role supposed to be only for Kanna Chan.\\n{arrow} <@&876817811396263946>", "NSFW material**\", value=f\"{arrow} This is a community server and not", "the official Server of the bot **Kanna Chan**. It's a", "events are about to be done here! So, stay tuned!\",", "inline=False ) emb.add_field( name=f\"{heart} **Server Raiding**\", value=f\"{arrow} Raiding or mentions", "just ping any staff member or DM `aSHish#1198`. 
Have a", "and have the highest power and authority after owner.\\n{arrow} <@&876818242058997791>", "ABOUT SERVER {kdance}\",description = f\"{arrow} **DRAGON LOLI'S HOME** is the", ") emb.add_field( name=f\"{heart} **CHANGES**\", value = f\"{arrow} ***Your presence in", "name=f\"{heart} **CHANGES**\", value = f\"{arrow} ***Your presence in this server", "All other users who join this server get this role", "**Kanna Chan**. It's a friendly community meant for having fun,", "{kbored}\", color=0xfc74c6) emb.add_field( name=f\"{heart} **Be respectful**\", value=f\"You must respect all", "liking towards them. Treat others the way you want to", "discretion. If you feel mistreated DM an Admin and we", "join voice chat channels without permission of the people already", "They have image and embed perms by deault.\\n{arrow} **PS: APART", "can report BUGS here if you find any in Kanna", "the issue.***\", inline=False ) emb.add_field( name=f\"{heart} **CHANGES**\", value = f\"{arrow}", "use of profanity should be kept to a minimum. However,", "in this server implies accepting these rules, including all further", "absolutely prohibited and disallowed.\", inline=False ) emb.add_field( name=f\"{heart} **Follow the", "= \"<:kanna_smug:877038777896427560>\" heart = \"<a:explosion_heart:877426228775227392>\" class Server(commands.Cog): def __init__(self, client):", "community server and not meant to share this kind of", "ISSUES**\", value = f\"{arrow} ***The Admins and Mods will Mute/Kick/Ban", "supporter**!! 
If you need any kind of help or support", "about_file) emb = discord.Embed(title=f\"{kdance} ABOUT SERVER {kdance}\",description = f\"{arrow} **DRAGON", "\"<:kanna_bored:877036162827583538>\" ksmug = \"<:kanna_smug:877038777896427560>\" heart = \"<a:explosion_heart:877426228775227392>\" class Server(commands.Cog): def", "general chat for the server.\\n{arrow} <#876798809819189249> Bot Commands should be", "Have a nice stay in the server :)\" await member.send(desc)", "<#877030933847490691> Read the rules here.\\n{arrow} <#877031867440832574> Channel for grabbing self-roles.\\n{arrow}", "self-roles.\\n{arrow} <#876798564704084011> The general chat for the server.\\n{arrow} <#876798809819189249> Bot", "= f\"{arrow} ***Your presence in this server implies accepting these", "get this role by default. They have image and embed", "discord.Embed(title=f\"{kdance} ABOUT SERVER {kdance}\",description = f\"{arrow} **DRAGON LOLI'S HOME** is", "However, any derogatory language towards any user is prohibited.\", inline=False", "send a lot of small messages right after each other.", "or support just ping any staff member or DM `aSHish#1198`.", "a lot of fun events are about to be done", "MEMBERS.**\", inline=False ) emb.add_field( name=f\"{ksmug} __CHANNELS__\", value=f\"{arrow} <#877030933847490691> Read the", "emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.command() @commands.is_owner() async def", "<#876798696078065694> You can give suggestions for improving Kanna Chan here.\\n{arrow}", "for Kanna Chan.\\n{arrow} <@&876817811396263946> Admins of the Server and have", "await member.send(desc) else: return def setup(client): client.add_cog(Server(client)) print(\">> Server Utility", "text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.command() @commands.is_owner() async def rule(self,", "raiding are not allowed.\", inline=False ) emb.add_field( name=f\"{heart} **Direct &", "be kept to a minimum. 
However, any derogatory language towards", "here.\\n{arrow} <#876798696078065694> You can give suggestions for improving Kanna Chan", "**P.S: YOU CAN PING ANY STAFF MEMBER OR DEVELOPER WHILE", "feel mistreated DM an Admin and we will resolve the", "So, stay tuned!\", color=0xfc74c6) emb.add_field( name=f\"{kwee} __ROLES__\", value=f\"{arrow} <@&876800883441156138> The", "way you want to be treated.\", inline=False ) emb.add_field( name=f\"{heart}", "857835279259664403 @commands.command() @commands.is_owner() async def sabout(self, ctx): kana = self.client.get_user(self.kana_id)", ") await ctx.send(embed=emb) @commands.Cog.listener() async def on_member_join(self, member): if member.guild.id", "in there.\", inline=False ) emb.add_field( name=f\"{heart} **DECISIONS AND ISSUES**\", value", "channels without permission of the people already in there.\", inline=False", "The general chat for the server.\\n{arrow} <#876798809819189249> Bot Commands should", "relevant and provides actual value (Video/Art)\", inline=False ) emb.add_field( name=f\"{heart}", "name=f\"{ksmug} __CHANNELS__\", value=f\"{arrow} <#877030933847490691> Read the rules here.\\n{arrow} <#877031867440832574> Channel", "Community Guidelines**\", value=f\"{arrow} You can find them here: https://discordapp.com/guidelines\", inline=False", "a nice stay in the server :)\" await member.send(desc) else:", "perms by deault.\\n{arrow} **PS: APART FROM THESE SELF-ROLES ARE ALSO", "of <NAME> have this role.\\n{arrow} <@&876804164661944340> All other users who", "f\"{member.name} Thanks for joining Kanna's Server. The server is currently", "inline=False ) emb.add_field( name=f\"{heart} **CHANGES**\", value = f\"{arrow} ***Your presence", "value = f\"{arrow} ***Your presence in this server implies accepting", "value (Video/Art)\", inline=False ) emb.add_field( name=f\"{heart} **No offensive names and", "about to be done here! 
So, stay tuned!\", color=0xfc74c6) emb.add_field(", "DM an Admin and we will resolve the issue.***\", inline=False", "BUG OR IN CASE OF ANY QUERY.**\", inline=False ) emb.set_footer(", ") emb.add_field( name=f\"{heart} **No pornographic/adult/other NSFW material**\", value=f\"{arrow} This is", "this server implies accepting these rules, including all further changes.", "any time without notice, it is your responsibility to check", "responsibility to check for them.***\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url", "chat by spamming.\", inline=False ) emb.add_field( name=f\"{heart} **No pornographic/adult/other NSFW", "AVAIALBLE FOR MEMBERS.**\", inline=False ) emb.add_field( name=f\"{ksmug} __CHANNELS__\", value=f\"{arrow} <#877030933847490691>", "sabout(self, ctx): kana = self.client.get_user(self.kana_id) about_file = discord.File(\"./images/about_server.png\") await ctx.send(file", "The highest role supposed to be only for Kanna Chan.\\n{arrow}", "you want to be treated.\", inline=False ) emb.add_field( name=f\"{heart} **No", "**Server Raiding**\", value=f\"{arrow} Raiding or mentions of raiding are not", "provides actual value (Video/Art)\", inline=False ) emb.add_field( name=f\"{heart} **No offensive", "ping any staff member or DM `aSHish#1198`. Have a nice", "\"<a:explosion_heart:877426228775227392>\" class Server(commands.Cog): def __init__(self, client): self.client = client self.kana_id", "friendly community meant for having fun, chilling and spending time", "embed perms by deault.\\n{arrow} **PS: APART FROM THESE SELF-ROLES ARE", "<#876798564704084011> The general chat for the server.\\n{arrow} <#876798809819189249> Bot Commands", "arrow = \"<a:right:877425183839891496>\" kwee = \"<:kannawee:877036162122924072>\" kdance = \"<a:kanna_dance:877038778798207016>\" kbored", "per discretion. 
If you feel mistreated DM an Admin and", "people already in there.\", inline=False ) emb.add_field( name=f\"{heart} **DECISIONS AND", "Server. The server is currently under construction, Thanks for being", "and spending time with others.\\n{arrow} This server has cute emotes", "value=f\"{arrow} <@&876800883441156138> The highest role supposed to be only for", "the people already in there.\", inline=False ) emb.add_field( name=f\"{heart} **DECISIONS", "any derogatory language towards any user is prohibited.\", inline=False )", "you find any in Kanna Chan.\\n{arrow} <#876798750876651530> For any other", "ANY QUERY.**\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb)", "the server meant to moderate the chat and maintain a", "emb.add_field( name=f\"{heart} **Server Raiding**\", value=f\"{arrow} Raiding or mentions of raiding", "report BUGS here if you find any in Kanna Chan.\\n{arrow}", "users, regardless of your liking towards them. Treat others the", "and other malicious threats are absolutely prohibited and disallowed.\", inline=False", "there.\", inline=False ) emb.add_field( name=f\"{heart} **DECISIONS AND ISSUES**\", value =", "if you find any in Kanna Chan.\\n{arrow} <#876798750876651530> For any", "spamming**\", value=f\"{arrow} Don't send a lot of small messages right", "of the Server and have the highest power and authority", "YOU CAN PING ANY STAFF MEMBER OR DEVELOPER WHILE REPORTING", "has cute emotes and a lot of fun events are", "and profile pictures**\", value=f\"{arrow} You will be asked to change", "must respect all users, regardless of your liking towards them.", "lot of fun events are about to be done here!", ") emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.command() @commands.is_owner() async", "Kanna Chan.\\n{arrow} <#876798750876651530> For any other support or query use", "regardless of your liking towards them. 
Treat others the way", "advertisements, whether it be for other communities or streams. You", "be asked to change your name or picture if the", "= 857835279259664403 @commands.command() @commands.is_owner() async def sabout(self, ctx): kana =", "resolve the issue.***\", inline=False ) emb.add_field( name=f\"{heart} **CHANGES**\", value =", "inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.Cog.listener() async", "<NAME> have this role.\\n{arrow} <@&876804164661944340> All other users who join", "name=f\"{heart} **Direct & Indirect Threats**\", value=f\"{arrow} Threats to other users", "CHANNELS**\", value=f\"{arrow} Do not join voice chat channels without permission", "= \"<:kannawee:877036162122924072>\" kdance = \"<a:kanna_dance:877038778798207016>\" kbored = \"<:kanna_bored:877036162827583538>\" ksmug =", "a lot of small messages right after each other. Do", "name=f\"{heart} **Be respectful**\", value=f\"You must respect all users, regardless of", "have image and embed perms by deault.\\n{arrow} **PS: APART FROM", "here if you find any in Kanna Chan.\\n{arrow} <#876798750876651530> For", "the bot **Kanna Chan**. It's a friendly community meant for", "self.client = client self.kana_id = 857835279259664403 @commands.command() @commands.is_owner() async def", "towards any user is prohibited.\", inline=False ) emb.add_field( name=f\"{heart} **No", "environment in community.\\n{arrow} <@&876801038420701196> Developer(s) of <NAME> have this role.\\n{arrow}", "your content in the media channel if it is relevant", "to be only for Kanna Chan.\\n{arrow} <@&876817811396263946> Admins of the", "Raiding or mentions of raiding are not allowed.\", inline=False )", "it is your responsibility to check for them.***\", inline=False )", "Thanks for joining Kanna's Server. 
The server is currently under", "member): if member.guild.id == 876798564704084008: if member.bot: return else: member_role", "kdance = \"<a:kanna_dance:877038778798207016>\" kbored = \"<:kanna_bored:877036162827583538>\" ksmug = \"<:kanna_smug:877038777896427560>\" heart", "other. Do not disrupt chat by spamming.\", inline=False ) emb.add_field(", "This is a community server and not meant to share", "be executed here.\\n{arrow} <#876798696078065694> You can give suggestions for improving", "or mentions of raiding are not allowed.\", inline=False ) emb.add_field(", ") emb.add_field( name=f\"{heart} **VOICE CHANNELS**\", value=f\"{arrow} Do not join voice", "do not tolerate any kind of advertisements, whether it be", "& Indirect Threats**\", value=f\"{arrow} Threats to other users of DDoS,", "__ROLES__\", value=f\"{arrow} <@&876800883441156138> The highest role supposed to be only", "<#876798720254029864> You can report BUGS here if you find any", "kept to a minimum. However, any derogatory language towards any", "if it is relevant and provides actual value (Video/Art)\", inline=False", "ctx.send(file=rule_file) emb = discord.Embed(title=f\"{kbored} RULES {kbored}\", color=0xfc74c6) emb.add_field( name=f\"{heart} **Be", "inline=False ) emb.add_field( name=f\"{ksmug} __CHANNELS__\", value=f\"{arrow} <#877030933847490691> Read the rules", "= f\"{arrow} **DRAGON LOLI'S HOME** is the official Server of", "malicious threats are absolutely prohibited and disallowed.\", inline=False ) emb.add_field(", "Admins and Mods will Mute/Kick/Ban per discretion. If you feel", "f\"{arrow} ***Your presence in this server implies accepting these rules,", "name=f\"{heart} **No Inappropriate Language**\", value=f\"{arrow} The use of profanity should", "to share this kind of material.\", inline=False ) emb.add_field( name=f\"{heart}", "not join voice chat channels without permission of the people", "should be kept to a minimum. 
However, any derogatory language", "Do not disrupt chat by spamming.\", inline=False ) emb.add_field( name=f\"{heart}", "for grabbing self-roles.\\n{arrow} <#876798564704084011> The general chat for the server.\\n{arrow}", "\"<a:right:877425183839891496>\" kwee = \"<:kannawee:877036162122924072>\" kdance = \"<a:kanna_dance:877038778798207016>\" kbored = \"<:kanna_bored:877036162827583538>\"", "to other users of DDoS, Death, DoX, abuse, and other", "moderate the chat and maintain a positive environment in community.\\n{arrow}", "to change your name or picture if the staff deems", "small messages right after each other. Do not disrupt chat", "users of DDoS, Death, DoX, abuse, and other malicious threats", "meant to share this kind of material.\", inline=False ) emb.add_field(", "Admin and we will resolve the issue.***\", inline=False ) emb.add_field(", "<@&876800883441156138> The highest role supposed to be only for Kanna", "give suggestions for improving Kanna Chan here.\\n{arrow} <#876798720254029864> You can", "picture if the staff deems them inappropriate.\", inline=False ) emb.add_field(", "= discord.File(\"./images/about_server.png\") await ctx.send(file = about_file) emb = discord.Embed(title=f\"{kdance} ABOUT", "HOME** is the official Server of the bot **Kanna Chan**.", "@commands.command() @commands.is_owner() async def rule(self, ctx): kana = self.client.get_user(self.kana_id) rule_file", "self.client.get_user(self.kana_id) rule_file = discord.File(\"./images/rules.png\") await ctx.send(file=rule_file) emb = discord.Embed(title=f\"{kbored} RULES", "name=f\"{heart} **DECISIONS AND ISSUES**\", value = f\"{arrow} ***The Admins and", "it is relevant and provides actual value (Video/Art)\", inline=False )", "https://discordapp.com/guidelines\", inline=False ) emb.add_field( name=f\"{heart} **VOICE CHANNELS**\", value=f\"{arrow} Do not", "the server :)\" await member.send(desc) else: return def setup(client): client.add_cog(Server(client))", "here.\\n{arrow} 
<#877031867440832574> Channel for grabbing self-roles.\\n{arrow} <#876798564704084011> The general chat", "async def sabout(self, ctx): kana = self.client.get_user(self.kana_id) about_file = discord.File(\"./images/about_server.png\")", "a community server and not meant to share this kind", "your responsibility to check for them.***\", inline=False ) emb.set_footer( text=\"<NAME>\",", "kbored = \"<:kanna_bored:877036162827583538>\" ksmug = \"<:kanna_smug:877038777896427560>\" heart = \"<a:explosion_heart:877426228775227392>\" class", "join this server get this role by default. They have", "your liking towards them. Treat others the way you want", "have the highest power and authority after owner.\\n{arrow} <@&876818242058997791> Moderators", "them inappropriate.\", inline=False ) emb.add_field( name=f\"{heart} **Server Raiding**\", value=f\"{arrow} Raiding", "Mods will Mute/Kick/Ban per discretion. If you feel mistreated DM", "in community.\\n{arrow} <@&876801038420701196> Developer(s) of <NAME> have this role.\\n{arrow} <@&876804164661944340>", "suggestions for improving Kanna Chan here.\\n{arrow} <#876798720254029864> You can report", "other users of DDoS, Death, DoX, abuse, and other malicious", "bot **Kanna Chan**. It's a friendly community meant for having", "be for other communities or streams. You can post your", "for other communities or streams. You can post your content", "Commands should be executed here.\\n{arrow} <#876798696078065694> You can give suggestions", "value=f\"{arrow} We do not tolerate any kind of advertisements, whether", "chat channels without permission of the people already in there.\",", ") emb.add_field( name=f\"{heart} **Follow the Discord Community Guidelines**\", value=f\"{arrow} You", "will Mute/Kick/Ban per discretion. 
If you feel mistreated DM an", "fun, chilling and spending time with others.\\n{arrow} This server has", "return else: member_role = member.guild.get_role(876804164661944340) await member.add_roles(member_role) desc = f\"{member.name}", "if the staff deems them inappropriate.\", inline=False ) emb.add_field( name=f\"{heart}", "the server.\\n{arrow} <#876798809819189249> Bot Commands should be executed here.\\n{arrow} <#876798696078065694>", "of fun events are about to be done here! So,", "emb.add_field( name=f\"{kwee} __ROLES__\", value=f\"{arrow} <@&876800883441156138> The highest role supposed to", "for joining Kanna's Server. The server is currently under construction,", "emb.add_field( name=f\"{heart} **VOICE CHANNELS**\", value=f\"{arrow} Do not join voice chat", "the highest power and authority after owner.\\n{arrow} <@&876818242058997791> Moderators of", "prohibited.\", inline=False ) emb.add_field( name=f\"{heart} **No spamming**\", value=f\"{arrow} Don't send", "done here! So, stay tuned!\", color=0xfc74c6) emb.add_field( name=f\"{kwee} __ROLES__\", value=f\"{arrow}", "**No Inappropriate Language**\", value=f\"{arrow} The use of profanity should be", "discord.ext import commands arrow = \"<a:right:877425183839891496>\" kwee = \"<:kannawee:877036162122924072>\" kdance", "a positive environment in community.\\n{arrow} <@&876801038420701196> Developer(s) of <NAME> have", "and embed perms by deault.\\n{arrow} **PS: APART FROM THESE SELF-ROLES", "community meant for having fun, chilling and spending time with", "for being an **early supporter**!! 
If you need any kind", "inline=False ) emb.add_field( name=f\"{heart} **No advertisements**\", value=f\"{arrow} We do not", "changes might be done at any time without notice, it", "commands arrow = \"<a:right:877425183839891496>\" kwee = \"<:kannawee:877036162122924072>\" kdance = \"<a:kanna_dance:877038778798207016>\"", "rules here.\\n{arrow} <#877031867440832574> Channel for grabbing self-roles.\\n{arrow} <#876798564704084011> The general", "the rules here.\\n{arrow} <#877031867440832574> Channel for grabbing self-roles.\\n{arrow} <#876798564704084011> The", "**Direct & Indirect Threats**\", value=f\"{arrow} Threats to other users of", "authority after owner.\\n{arrow} <@&876818242058997791> Moderators of the server meant to", "OR DEVELOPER WHILE REPORTING BUG OR IN CASE OF ANY", "else: member_role = member.guild.get_role(876804164661944340) await member.add_roles(member_role) desc = f\"{member.name} Thanks", "name or picture if the staff deems them inappropriate.\", inline=False", "content in the media channel if it is relevant and", "value = f\"{arrow} ***The Admins and Mods will Mute/Kick/Ban per", "names and profile pictures**\", value=f\"{arrow} You will be asked to", "ctx): kana = self.client.get_user(self.kana_id) about_file = discord.File(\"./images/about_server.png\") await ctx.send(file =", "of small messages right after each other. Do not disrupt", "without notice, it is your responsibility to check for them.***\",", "Kanna's Server. 
The server is currently under construction, Thanks for", "**DECISIONS AND ISSUES**\", value = f\"{arrow} ***The Admins and Mods", "mistreated DM an Admin and we will resolve the issue.***\",", "**No pornographic/adult/other NSFW material**\", value=f\"{arrow} This is a community server", "and provides actual value (Video/Art)\", inline=False ) emb.add_field( name=f\"{heart} **No", "need any kind of help or support just ping any", "and a lot of fun events are about to be", "<#876798750876651530> For any other support or query use this channel.\\n{arrow}", "or streams. You can post your content in the media", "or DM `aSHish#1198`. Have a nice stay in the server", "server implies accepting these rules, including all further changes. These", "= self.client.get_user(self.kana_id) about_file = discord.File(\"./images/about_server.png\") await ctx.send(file = about_file) emb", "change your name or picture if the staff deems them", "and maintain a positive environment in community.\\n{arrow} <@&876801038420701196> Developer(s) of", "\"<:kannawee:877036162122924072>\" kdance = \"<a:kanna_dance:877038778798207016>\" kbored = \"<:kanna_bored:877036162827583538>\" ksmug = \"<:kanna_smug:877038777896427560>\"", "inline=False ) emb.add_field( name=f\"{heart} **VOICE CHANNELS**\", value=f\"{arrow} Do not join", "of the bot **Kanna Chan**. It's a friendly community meant", "after owner.\\n{arrow} <@&876818242058997791> Moderators of the server meant to moderate", "further changes. These changes might be done at any time", "these rules, including all further changes. These changes might be", "your name or picture if the staff deems them inappropriate.\",", "of the server meant to moderate the chat and maintain", "inline=False ) emb.add_field( name=f\"{heart} **No spamming**\", value=f\"{arrow} Don't send a", "all further changes. These changes might be done at any", "cute emotes and a lot of fun events are about", "official Server of the bot **Kanna Chan**. 
It's a friendly", "text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.Cog.listener() async def on_member_join(self, member):", "def on_member_join(self, member): if member.guild.id == 876798564704084008: if member.bot: return", "FROM THESE SELF-ROLES ARE ALSO AVAIALBLE FOR MEMBERS.**\", inline=False )", "rules, including all further changes. These changes might be done", "only for Kanna Chan.\\n{arrow} <@&876817811396263946> Admins of the Server and", ") emb.add_field( name=f\"{heart} **No Inappropriate Language**\", value=f\"{arrow} The use of", "here! So, stay tuned!\", color=0xfc74c6) emb.add_field( name=f\"{kwee} __ROLES__\", value=f\"{arrow} <@&876800883441156138>", "inline=False ) emb.add_field( name=f\"{heart} **No offensive names and profile pictures**\",", "the Server and have the highest power and authority after", "material.\", inline=False ) emb.add_field( name=f\"{heart} **No advertisements**\", value=f\"{arrow} We do", "a friendly community meant for having fun, chilling and spending", "import discord from discord.ext import commands arrow = \"<a:right:877425183839891496>\" kwee", "this role.\\n{arrow} <@&876804164661944340> All other users who join this server", "this channel.\\n{arrow} **P.S: YOU CAN PING ANY STAFF MEMBER OR", "<@&876817811396263946> Admins of the Server and have the highest power", "streams. 
You can post your content in the media channel", "Threats**\", value=f\"{arrow} Threats to other users of DDoS, Death, DoX,", "is prohibited.\", inline=False ) emb.add_field( name=f\"{heart} **No spamming**\", value=f\"{arrow} Don't", "**No spamming**\", value=f\"{arrow} Don't send a lot of small messages", "at any time without notice, it is your responsibility to", ") emb.add_field( name=f\"{heart} **No spamming**\", value=f\"{arrow} Don't send a lot", "for the server.\\n{arrow} <#876798809819189249> Bot Commands should be executed here.\\n{arrow}", "on_member_join(self, member): if member.guild.id == 876798564704084008: if member.bot: return else:", "rule(self, ctx): kana = self.client.get_user(self.kana_id) rule_file = discord.File(\"./images/rules.png\") await ctx.send(file=rule_file)", "accepting these rules, including all further changes. These changes might", "value=f\"You must respect all users, regardless of your liking towards", "spamming.\", inline=False ) emb.add_field( name=f\"{heart} **No pornographic/adult/other NSFW material**\", value=f\"{arrow}", "icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.Cog.listener() async def on_member_join(self, member): if", "disrupt chat by spamming.\", inline=False ) emb.add_field( name=f\"{heart} **No pornographic/adult/other", "= f\"{arrow} ***The Admins and Mods will Mute/Kick/Ban per discretion.", "<@&876818242058997791> Moderators of the server meant to moderate the chat", "them. Treat others the way you want to be treated.\",", "time without notice, it is your responsibility to check for", "here: https://discordapp.com/guidelines\", inline=False ) emb.add_field( name=f\"{heart} **VOICE CHANNELS**\", value=f\"{arrow} Do", "server :)\" await member.send(desc) else: return def setup(client): client.add_cog(Server(client)) print(\">>", "after each other. 
Do not disrupt chat by spamming.\", inline=False", "client self.kana_id = 857835279259664403 @commands.command() @commands.is_owner() async def sabout(self, ctx):", "advertisements**\", value=f\"{arrow} We do not tolerate any kind of advertisements,", "having fun, chilling and spending time with others.\\n{arrow} This server", "derogatory language towards any user is prohibited.\", inline=False ) emb.add_field(", "other users who join this server get this role by", "messages right after each other. Do not disrupt chat by", "name=f\"{heart} **No spamming**\", value=f\"{arrow} Don't send a lot of small", "SELF-ROLES ARE ALSO AVAIALBLE FOR MEMBERS.**\", inline=False ) emb.add_field( name=f\"{ksmug}", "emotes and a lot of fun events are about to", "def __init__(self, client): self.client = client self.kana_id = 857835279259664403 @commands.command()", "The use of profanity should be kept to a minimum.", "profile pictures**\", value=f\"{arrow} You will be asked to change your", "it be for other communities or streams. You can post", "by default. They have image and embed perms by deault.\\n{arrow}", "can give suggestions for improving Kanna Chan here.\\n{arrow} <#876798720254029864> You", "server.\\n{arrow} <#876798809819189249> Bot Commands should be executed here.\\n{arrow} <#876798696078065694> You", "prohibited and disallowed.\", inline=False ) emb.add_field( name=f\"{heart} **Follow the Discord", "lot of small messages right after each other. Do not", "community.\\n{arrow} <@&876801038420701196> Developer(s) of <NAME> have this role.\\n{arrow} <@&876804164661944340> All", "= about_file) emb = discord.Embed(title=f\"{kdance} ABOUT SERVER {kdance}\",description = f\"{arrow}", "server get this role by default. 
They have image and", "of raiding are not allowed.\", inline=False ) emb.add_field( name=f\"{heart} **Direct", "inline=False ) emb.add_field( name=f\"{heart} **Direct & Indirect Threats**\", value=f\"{arrow} Threats", "ctx.send(embed=emb) @commands.command() @commands.is_owner() async def rule(self, ctx): kana = self.client.get_user(self.kana_id)", "others the way you want to be treated.\", inline=False )", "\"<:kanna_smug:877038777896427560>\" heart = \"<a:explosion_heart:877426228775227392>\" class Server(commands.Cog): def __init__(self, client): self.client", "language towards any user is prohibited.\", inline=False ) emb.add_field( name=f\"{heart}", "pictures**\", value=f\"{arrow} You will be asked to change your name", "heart = \"<a:explosion_heart:877426228775227392>\" class Server(commands.Cog): def __init__(self, client): self.client =", "construction, Thanks for being an **early supporter**!! If you need", "changes. These changes might be done at any time without", "IN CASE OF ANY QUERY.**\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url", "who join this server get this role by default. They", "member.add_roles(member_role) desc = f\"{member.name} Thanks for joining Kanna's Server. The", "self.client.get_user(self.kana_id) about_file = discord.File(\"./images/about_server.png\") await ctx.send(file = about_file) emb =", "@commands.is_owner() async def rule(self, ctx): kana = self.client.get_user(self.kana_id) rule_file =", ") emb.add_field( name=f\"{heart} **DECISIONS AND ISSUES**\", value = f\"{arrow} ***The", "treated.\", inline=False ) emb.add_field( name=f\"{heart} **No Inappropriate Language**\", value=f\"{arrow} The", "of advertisements, whether it be for other communities or streams.", "role by default. 
They have image and embed perms by", "is currently under construction, Thanks for being an **early supporter**!!", "Chan here.\\n{arrow} <#876798720254029864> You can report BUGS here if you", "You can find them here: https://discordapp.com/guidelines\", inline=False ) emb.add_field( name=f\"{heart}", ") emb.add_field( name=f\"{heart} **No advertisements**\", value=f\"{arrow} We do not tolerate", "***The Admins and Mods will Mute/Kick/Ban per discretion. If you", "share this kind of material.\", inline=False ) emb.add_field( name=f\"{heart} **No", "name=f\"{heart} **Follow the Discord Community Guidelines**\", value=f\"{arrow} You can find", "and we will resolve the issue.***\", inline=False ) emb.add_field( name=f\"{heart}", "kind of advertisements, whether it be for other communities or", ") emb.add_field( name=f\"{heart} **No offensive names and profile pictures**\", value=f\"{arrow}", "Language**\", value=f\"{arrow} The use of profanity should be kept to", "other malicious threats are absolutely prohibited and disallowed.\", inline=False )", "by spamming.\", inline=False ) emb.add_field( name=f\"{heart} **No pornographic/adult/other NSFW material**\",", "== 876798564704084008: if member.bot: return else: member_role = member.guild.get_role(876804164661944340) await", "Channel for grabbing self-roles.\\n{arrow} <#876798564704084011> The general chat for the", "emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.Cog.listener() async def on_member_join(self,", "tolerate any kind of advertisements, whether it be for other", "issue.***\", inline=False ) emb.add_field( name=f\"{heart} **CHANGES**\", value = f\"{arrow} ***Your", "each other. 
Do not disrupt chat by spamming.\", inline=False )", "deems them inappropriate.\", inline=False ) emb.add_field( name=f\"{heart} **Server Raiding**\", value=f\"{arrow}", "asked to change your name or picture if the staff", "are absolutely prohibited and disallowed.\", inline=False ) emb.add_field( name=f\"{heart} **Follow", "value=f\"{arrow} Threats to other users of DDoS, Death, DoX, abuse,", "voice chat channels without permission of the people already in", "DEVELOPER WHILE REPORTING BUG OR IN CASE OF ANY QUERY.**\",", "= client self.kana_id = 857835279259664403 @commands.command() @commands.is_owner() async def sabout(self,", "<#877031867440832574> Channel for grabbing self-roles.\\n{arrow} <#876798564704084011> The general chat for", "You can post your content in the media channel if", "is relevant and provides actual value (Video/Art)\", inline=False ) emb.add_field(", "image and embed perms by deault.\\n{arrow} **PS: APART FROM THESE", "OF ANY QUERY.**\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await", "about_file = discord.File(\"./images/about_server.png\") await ctx.send(file = about_file) emb = discord.Embed(title=f\"{kdance}", "an Admin and we will resolve the issue.***\", inline=False )", "not disrupt chat by spamming.\", inline=False ) emb.add_field( name=f\"{heart} **No", "emb.add_field( name=f\"{heart} **Direct & Indirect Threats**\", value=f\"{arrow} Threats to other", "Indirect Threats**\", value=f\"{arrow} Threats to other users of DDoS, Death,", "of your liking towards them. Treat others the way you", "server and not meant to share this kind of material.\",", "<#876798809819189249> Bot Commands should be executed here.\\n{arrow} <#876798696078065694> You can", "LOLI'S HOME** is the official Server of the bot **Kanna", "allowed.\", inline=False ) emb.add_field( name=f\"{heart} **Direct & Indirect Threats**\", value=f\"{arrow}", "joining Kanna's Server. 
The server is currently under construction, Thanks", "be only for Kanna Chan.\\n{arrow} <@&876817811396263946> Admins of the Server", "meant to moderate the chat and maintain a positive environment", "STAFF MEMBER OR DEVELOPER WHILE REPORTING BUG OR IN CASE", "if member.guild.id == 876798564704084008: if member.bot: return else: member_role =", "all users, regardless of your liking towards them. Treat others", "emb.add_field( name=f\"{ksmug} __CHANNELS__\", value=f\"{arrow} <#877030933847490691> Read the rules here.\\n{arrow} <#877031867440832574>", "ANY STAFF MEMBER OR DEVELOPER WHILE REPORTING BUG OR IN", "APART FROM THESE SELF-ROLES ARE ALSO AVAIALBLE FOR MEMBERS.**\", inline=False", "we will resolve the issue.***\", inline=False ) emb.add_field( name=f\"{heart} **CHANGES**\",", "You can report BUGS here if you find any in", "value=f\"{arrow} Don't send a lot of small messages right after", "be done at any time without notice, it is your", "chilling and spending time with others.\\n{arrow} This server has cute", "is your responsibility to check for them.***\", inline=False ) emb.set_footer(", "abuse, and other malicious threats are absolutely prohibited and disallowed.\",", "Kanna Chan here.\\n{arrow} <#876798720254029864> You can report BUGS here if", "if member.bot: return else: member_role = member.guild.get_role(876804164661944340) await member.add_roles(member_role) desc", "permission of the people already in there.\", inline=False ) emb.add_field(", "We do not tolerate any kind of advertisements, whether it", "\"<a:kanna_dance:877038778798207016>\" kbored = \"<:kanna_bored:877036162827583538>\" ksmug = \"<:kanna_smug:877038777896427560>\" heart = \"<a:explosion_heart:877426228775227392>\"", "This server has cute emotes and a lot of fun", "**No advertisements**\", value=f\"{arrow} We do not tolerate any kind of", "**CHANGES**\", value = f\"{arrow} ***Your presence in this server implies", "can post your content in the media channel if it", "Raiding**\", 
value=f\"{arrow} Raiding or mentions of raiding are not allowed.\",", "member.bot: return else: member_role = member.guild.get_role(876804164661944340) await member.add_roles(member_role) desc =", "of material.\", inline=False ) emb.add_field( name=f\"{heart} **No advertisements**\", value=f\"{arrow} We", "of the people already in there.\", inline=False ) emb.add_field( name=f\"{heart}", "{kdance}\",description = f\"{arrow} **DRAGON LOLI'S HOME** is the official Server", "supposed to be only for Kanna Chan.\\n{arrow} <@&876817811396263946> Admins of", ") await ctx.send(embed=emb) @commands.command() @commands.is_owner() async def rule(self, ctx): kana", "under construction, Thanks for being an **early supporter**!! If you", "for improving Kanna Chan here.\\n{arrow} <#876798720254029864> You can report BUGS", "876798564704084008: if member.bot: return else: member_role = member.guild.get_role(876804164661944340) await member.add_roles(member_role)", "FOR MEMBERS.**\", inline=False ) emb.add_field( name=f\"{ksmug} __CHANNELS__\", value=f\"{arrow} <#877030933847490691> Read", "desc = f\"{member.name} Thanks for joining Kanna's Server. The server", "this kind of material.\", inline=False ) emb.add_field( name=f\"{heart} **No advertisements**\",", "name=f\"{heart} **No offensive names and profile pictures**\", value=f\"{arrow} You will", "any user is prohibited.\", inline=False ) emb.add_field( name=f\"{heart} **No spamming**\",", "discord.File(\"./images/about_server.png\") await ctx.send(file = about_file) emb = discord.Embed(title=f\"{kdance} ABOUT SERVER", "find them here: https://discordapp.com/guidelines\", inline=False ) emb.add_field( name=f\"{heart} **VOICE CHANNELS**\",", "color=0xfc74c6) emb.add_field( name=f\"{heart} **Be respectful**\", value=f\"You must respect all users,", "await ctx.send(file=rule_file) emb = discord.Embed(title=f\"{kbored} RULES {kbored}\", color=0xfc74c6) emb.add_field( name=f\"{heart}", "other communities or streams. 
You can post your content in", "emb = discord.Embed(title=f\"{kdance} ABOUT SERVER {kdance}\",description = f\"{arrow} **DRAGON LOLI'S", "inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await ctx.send(embed=emb) @commands.command() @commands.is_owner()", "channel.\\n{arrow} **P.S: YOU CAN PING ANY STAFF MEMBER OR DEVELOPER", "be done here! So, stay tuned!\", color=0xfc74c6) emb.add_field( name=f\"{kwee} __ROLES__\",", "by deault.\\n{arrow} **PS: APART FROM THESE SELF-ROLES ARE ALSO AVAIALBLE", "to be treated.\", inline=False ) emb.add_field( name=f\"{heart} **No Inappropriate Language**\",", "REPORTING BUG OR IN CASE OF ANY QUERY.**\", inline=False )", "support or query use this channel.\\n{arrow} **P.S: YOU CAN PING", "<@&876801038420701196> Developer(s) of <NAME> have this role.\\n{arrow} <@&876804164661944340> All other", "power and authority after owner.\\n{arrow} <@&876818242058997791> Moderators of the server", "are not allowed.\", inline=False ) emb.add_field( name=f\"{heart} **Direct & Indirect", "= \"<a:explosion_heart:877426228775227392>\" class Server(commands.Cog): def __init__(self, client): self.client = client", "**DRAGON LOLI'S HOME** is the official Server of the bot", "without permission of the people already in there.\", inline=False )", "grabbing self-roles.\\n{arrow} <#876798564704084011> The general chat for the server.\\n{arrow} <#876798809819189249>", "are about to be done here! So, stay tuned!\", color=0xfc74c6)", "= discord.Embed(title=f\"{kdance} ABOUT SERVER {kdance}\",description = f\"{arrow} **DRAGON LOLI'S HOME**", "OR IN CASE OF ANY QUERY.**\", inline=False ) emb.set_footer( text=\"<NAME>\",", "not allowed.\", inline=False ) emb.add_field( name=f\"{heart} **Direct & Indirect Threats**\",", "of DDoS, Death, DoX, abuse, and other malicious threats are", "can find them here: https://discordapp.com/guidelines\", inline=False ) emb.add_field( name=f\"{heart} **VOICE", "staff member or DM `aSHish#1198`. 
Have a nice stay in", "emb.add_field( name=f\"{heart} **CHANGES**\", value = f\"{arrow} ***Your presence in this", "support just ping any staff member or DM `aSHish#1198`. Have", "minimum. However, any derogatory language towards any user is prohibited.\",", "inline=False ) emb.add_field( name=f\"{heart} **Follow the Discord Community Guidelines**\", value=f\"{arrow}", "emb.add_field( name=f\"{heart} **No spamming**\", value=f\"{arrow} Don't send a lot of", "server is currently under construction, Thanks for being an **early", "presence in this server implies accepting these rules, including all", "WHILE REPORTING BUG OR IN CASE OF ANY QUERY.**\", inline=False", "find any in Kanna Chan.\\n{arrow} <#876798750876651530> For any other support", "MEMBER OR DEVELOPER WHILE REPORTING BUG OR IN CASE OF", "be treated.\", inline=False ) emb.add_field( name=f\"{heart} **No Inappropriate Language**\", value=f\"{arrow}", "DDoS, Death, DoX, abuse, and other malicious threats are absolutely", "highest power and authority after owner.\\n{arrow} <@&876818242058997791> Moderators of the", "ALSO AVAIALBLE FOR MEMBERS.**\", inline=False ) emb.add_field( name=f\"{ksmug} __CHANNELS__\", value=f\"{arrow}", "already in there.\", inline=False ) emb.add_field( name=f\"{heart} **DECISIONS AND ISSUES**\",", "async def rule(self, ctx): kana = self.client.get_user(self.kana_id) rule_file = discord.File(\"./images/rules.png\")", "including all further changes. 
These changes might be done at", "the way you want to be treated.\", inline=False ) emb.add_field(", "inline=False ) emb.add_field( name=f\"{heart} **No Inappropriate Language**\", value=f\"{arrow} The use", "value=f\"{arrow} <#877030933847490691> Read the rules here.\\n{arrow} <#877031867440832574> Channel for grabbing", "to check for them.***\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url )", "owner.\\n{arrow} <@&876818242058997791> Moderators of the server meant to moderate the", "whether it be for other communities or streams. You can", "= member.guild.get_role(876804164661944340) await member.add_roles(member_role) desc = f\"{member.name} Thanks for joining", "Kanna Chan.\\n{arrow} <@&876817811396263946> Admins of the Server and have the", "kana = self.client.get_user(self.kana_id) rule_file = discord.File(\"./images/rules.png\") await ctx.send(file=rule_file) emb =", "@commands.command() @commands.is_owner() async def sabout(self, ctx): kana = self.client.get_user(self.kana_id) about_file", "is the official Server of the bot **Kanna Chan**. It's", "Server of the bot **Kanna Chan**. It's a friendly community", "Chan.\\n{arrow} <#876798750876651530> For any other support or query use this", "respectful**\", value=f\"You must respect all users, regardless of your liking", "emb.add_field( name=f\"{heart} **No pornographic/adult/other NSFW material**\", value=f\"{arrow} This is a", "Inappropriate Language**\", value=f\"{arrow} The use of profanity should be kept", "await ctx.send(embed=emb) @commands.Cog.listener() async def on_member_join(self, member): if member.guild.id ==", "towards them. Treat others the way you want to be", "any staff member or DM `aSHish#1198`. 
Have a nice stay", "value=f\"{arrow} The use of profanity should be kept to a", "ctx.send(file = about_file) emb = discord.Embed(title=f\"{kdance} ABOUT SERVER {kdance}\",description =", "await ctx.send(file = about_file) emb = discord.Embed(title=f\"{kdance} ABOUT SERVER {kdance}\",description", "threats are absolutely prohibited and disallowed.\", inline=False ) emb.add_field( name=f\"{heart}", "Mute/Kick/Ban per discretion. If you feel mistreated DM an Admin", "= discord.Embed(title=f\"{kbored} RULES {kbored}\", color=0xfc74c6) emb.add_field( name=f\"{heart} **Be respectful**\", value=f\"You", "notice, it is your responsibility to check for them.***\", inline=False", ") emb.add_field( name=f\"{heart} **Direct & Indirect Threats**\", value=f\"{arrow} Threats to", "emb.add_field( name=f\"{heart} **DECISIONS AND ISSUES**\", value = f\"{arrow} ***The Admins", "of help or support just ping any staff member or", "member or DM `aSHish#1198`. Have a nice stay in the", "**Be respectful**\", value=f\"You must respect all users, regardless of your", "not meant to share this kind of material.\", inline=False )", "= f\"{member.name} Thanks for joining Kanna's Server. 
The server is", "The server is currently under construction, Thanks for being an", "ksmug = \"<:kanna_smug:877038777896427560>\" heart = \"<a:explosion_heart:877426228775227392>\" class Server(commands.Cog): def __init__(self,", "**VOICE CHANNELS**\", value=f\"{arrow} Do not join voice chat channels without", "CASE OF ANY QUERY.**\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url )", "discord from discord.ext import commands arrow = \"<a:right:877425183839891496>\" kwee =", "If you feel mistreated DM an Admin and we will", "the media channel if it is relevant and provides actual", "name=f\"{heart} **VOICE CHANNELS**\", value=f\"{arrow} Do not join voice chat channels", "self.kana_id = 857835279259664403 @commands.command() @commands.is_owner() async def sabout(self, ctx): kana", "emb = discord.Embed(title=f\"{kbored} RULES {kbored}\", color=0xfc74c6) emb.add_field( name=f\"{heart} **Be respectful**\",", "AND ISSUES**\", value = f\"{arrow} ***The Admins and Mods will", "kwee = \"<:kannawee:877036162122924072>\" kdance = \"<a:kanna_dance:877038778798207016>\" kbored = \"<:kanna_bored:877036162827583538>\" ksmug", "PING ANY STAFF MEMBER OR DEVELOPER WHILE REPORTING BUG OR", "Death, DoX, abuse, and other malicious threats are absolutely prohibited", "server has cute emotes and a lot of fun events", "ARE ALSO AVAIALBLE FOR MEMBERS.**\", inline=False ) emb.add_field( name=f\"{ksmug} __CHANNELS__\",", "kana = self.client.get_user(self.kana_id) about_file = discord.File(\"./images/about_server.png\") await ctx.send(file = about_file)", "to moderate the chat and maintain a positive environment in", "check for them.***\", inline=False ) emb.set_footer( text=\"<NAME>\", icon_url=kana.avatar_url ) await", "async def on_member_join(self, member): if member.guild.id == 876798564704084008: if member.bot:", "Chan**. 
It's a friendly community meant for having fun, chilling", "disallowed.\", inline=False ) emb.add_field( name=f\"{heart} **Follow the Discord Community Guidelines**\",", "def sabout(self, ctx): kana = self.client.get_user(self.kana_id) about_file = discord.File(\"./images/about_server.png\") await", "emb.add_field( name=f\"{heart} **No advertisements**\", value=f\"{arrow} We do not tolerate any", "to a minimum. However, any derogatory language towards any user", ":)\" await member.send(desc) else: return def setup(client): client.add_cog(Server(client)) print(\">> Server", "any in Kanna Chan.\\n{arrow} <#876798750876651530> For any other support or", "currently under construction, Thanks for being an **early supporter**!! If", ") emb.add_field( name=f\"{ksmug} __CHANNELS__\", value=f\"{arrow} <#877030933847490691> Read the rules here.\\n{arrow}", "from discord.ext import commands arrow = \"<a:right:877425183839891496>\" kwee = \"<:kannawee:877036162122924072>\"", "<@&876804164661944340> All other users who join this server get this", "ctx): kana = self.client.get_user(self.kana_id) rule_file = discord.File(\"./images/rules.png\") await ctx.send(file=rule_file) emb", "material**\", value=f\"{arrow} This is a community server and not meant", "Developer(s) of <NAME> have this role.\\n{arrow} <@&876804164661944340> All other users", "emb.add_field( name=f\"{heart} **No Inappropriate Language**\", value=f\"{arrow} The use of profanity", "= \"<:kanna_bored:877036162827583538>\" ksmug = \"<:kanna_smug:877038777896427560>\" heart = \"<a:explosion_heart:877426228775227392>\" class Server(commands.Cog):", "to be done here! So, stay tuned!\", color=0xfc74c6) emb.add_field( name=f\"{kwee}" ]
[ "o = oct(d) b = bin(d) print ch, d, h,", "ch in \"Hello world!\": d = ord(ch) h = hex(d)", "= oct(d) b = bin(d) print ch, d, h, o,", "h = hex(d) o = oct(d) b = bin(d) print", "world!\": d = ord(ch) h = hex(d) o = oct(d)", "hex(d) o = oct(d) b = bin(d) print ch, d,", "\"Hello world!\": d = ord(ch) h = hex(d) o =", "= hex(d) o = oct(d) b = bin(d) print ch,", "oct(d) b = bin(d) print ch, d, h, o, b", "= ord(ch) h = hex(d) o = oct(d) b =", "d = ord(ch) h = hex(d) o = oct(d) b", "in \"Hello world!\": d = ord(ch) h = hex(d) o", "for ch in \"Hello world!\": d = ord(ch) h =", "ord(ch) h = hex(d) o = oct(d) b = bin(d)" ]
[ "paho import paho.mqtt as mqtt import ssl def _on_connect(c, userdata,", "auth is not None: username = auth['username'] try: password =", "if > 1. if msg_count == 1: messages = None", "0: c.disconnect() def callback(callback, topics, qos=0, userdata=None, hostname=\"localhost\", port=1883, client_id=\"\",", "parameters are optional and will default to None if not", "is applied to all topics. userdata : passed to the", "password) if will is not None: will_topic = will['topic'] try:", "which results in the client using the default behaviour -", "'userdata':userdata} client = paho.Client(client_id=client_id, userdata=callback_userdata, protocol=protocol, transport=transport) client.on_message = _on_message_callback", "are optional and will default to None, 0 and False", "set to False, retained messages will be ignored. This means", "must be > 0') # Set ourselves up to return", "True: return userdata['msg_count'] = userdata['msg_count'] - 1 if userdata['messages'] is", "no authentication is to be used. tls : a dict", "= <PASSWORD> client.username_pw_set(username, password) if will is not None: will_topic", "to \"tcp\" to use the default setting of transport which", "Set ourselves up to return a single message if msg_count", "1 then a list of MQTTMessages will be returned. retained", "except KeyError: keyfile = None try: tls_version = tls['tls_version'] except", "returns one or messages matching a set of topics, and", "None, which indicates no authentication is to be used. tls", "userdata['msg_count'] == 0: return # Don't process stale retained messages", "value for the client. Defaults to 60 seconds. will :", "the broker on. Defaults to 1883. client_id : the MQTT", "c.subscribe(t, userdata['qos']) else: c.subscribe(userdata['topics'], userdata['qos']) def _on_message_callback(c, userdata, message): \"\"\"Internal", "for the client. Defaults to 60 seconds. 
will : a", "= {'username':\"<username>\", 'password':\"<password>\"} Username is required, password is optional and", "the retained flag set. hostname : a string containing the", "means that with retained=False and msg_count=1, the function will return", "processing the messages received. topics : either a string containing", "messages = None else: messages = [] userdata = {'retained':retained,", "will is not None: will_topic = will['topic'] try: will_payload =", "to use the default setting of transport which is raw", "messages are processed by the user provided callback. This is", "raise mqtt.MQTTException(paho.connack_string(rc)) if type(userdata['topics']) is list: for t in userdata['topics']:", "Copyright (c) 2016 <NAME> <<EMAIL>> # # All rights reserved.", "is available at # http://www.eclipse.org/org/documents/edl-v10.php. # # Contributors: # <NAME>", "def _on_message_callback(c, userdata, message): \"\"\"Internal callback\"\"\" userdata['callback'](c, userdata['userdata'], message) def", "to, or a list of topics to subscribe to. qos", ": passed to the callback hostname : a string containing", "a single topic to subscribe to, or a list of", "< 0 or qos > 2: raise ValueError('qos must be", "if msg_count < 1: raise ValueError('msg_count must be > 0')", "ourselves up to return a single message if msg_count ==", "0 and False respectively. Defaults to None, which indicates no", "# and Eclipse Distribution License v1.0 which accompany this distribution.", "the first message received that does not have the retained", "creates an MQTT client, connects to a broker and subscribes", "protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe to a list of topics and process", "a broker and subscribes to a list of topics. Once", "TLS should not be used. transport : set to \"tcp\"", "returned. retained : If set to True, retained messages will", "to False, retained messages will be ignored. 
This means that", "# Copyright (c) 2016 <NAME> <<EMAIL>> # # All rights", "Defaults to None, which indicates no will should be used.", "def _on_message_simple(c, userdata, message): \"\"\"Internal callback\"\"\" if userdata['msg_count'] == 0:", "License v1.0 # and Eclipse Distribution License v1.0 which accompany", "a list of topics and return msg_count messages. This function", "at # http://www.eclipse.org/legal/epl-v10.html # and the Eclipse Distribution License is", "= tls['ciphers'] except KeyError: ciphers = None client.tls_set(ca_certs, certfile, keyfile,", "be ignored. This means that with retained=False and msg_count=1, the", "the broker to connect to. Defaults to localhost. port :", "connects to a broker and subscribes to a list of", "timeout value for the client. Defaults to 60 seconds. will", "callback hostname : a string containing the address of the", "userdata['msg_count'] == 0: userdata['messages'] = message c.disconnect() return userdata['messages'].append(message) if", "default setting of transport which is raw TCP. Set to", "None try: tls_version = tls['tls_version'] except KeyError: tls_version = ssl.PROTOCOL_SSLv23;", "message c.disconnect() return userdata['messages'].append(message) if userdata['msg_count'] == 0: c.disconnect() def", "using the default behaviour - see the paho.mqtt.client documentation. Defaults", "'messages':messages} callback(_on_message_simple, topics, qos, userdata, hostname, port, client_id, keepalive, will,", "MQTT client id to use. If \"\" or None, the", "use the default setting of transport which is raw TCP.", "= auth['username'] try: password = auth['password'] except KeyError: password =", "generate a client id automatically. 
keepalive : the keepalive timeout", "callback_userdata = { 'callback':callback, 'topics':topics, 'qos':qos, 'userdata':userdata} client = paho.Client(client_id=client_id,", ": a string containing the address of the broker to", "False client.will_set(will_topic, will_payload, will_qos, will_retain) if tls is not None:", "be used. tls : a dict containing TLS configuration parameters", "messages will be processed the same as non-retained messages. If", "KeyError: password = <PASSWORD> client.username_pw_set(username, password) if will is not", "== False and message.retain == True: return userdata['msg_count'] = userdata['msg_count']", "if not provided. Defaults to None, which indicates no authentication", "transport=transport) client.on_message = _on_message_callback client.on_connect = _on_connect if auth is", "== 0: userdata['messages'] = message c.disconnect() return userdata['messages'].append(message) if userdata['msg_count']", "'msg_count':msg_count, 'messages':messages} callback(_on_message_simple, topics, qos, userdata, hostname, port, client_id, keepalive,", "import ssl def _on_connect(c, userdata, flags, rc): \"\"\"Internal callback\"\"\" if", "will_payload, will_qos, will_retain) if tls is not None: ca_certs =", "broker and subscribes to a list of topics. Once \"msg_count\"", "not None: ca_certs = tls['ca_certs'] try: certfile = tls['certfile'] except", "'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">} ca_certs is required, all other parameters are", "userdata, message)\" for processing the messages received. topics : either", "message received that does not have the retained flag set.", "when subscribing. This is applied to all topics. msg_count :", "0 or qos > 2: raise ValueError('qos must be in", "try: ciphers = tls['ciphers'] except KeyError: ciphers = None client.tls_set(ca_certs,", "list of MQTTMessages will be returned. 
retained : If set", ": set to \"tcp\" to use the default setting of", "ciphers = tls['ciphers'] except KeyError: ciphers = None client.tls_set(ca_certs, certfile,", "two functions are simple(), which returns one or messages matching", "message if msg_count == 1, or a list # if", "accompany this distribution. # # The Eclipse Public License is", "_on_message_simple(c, userdata, message): \"\"\"Internal callback\"\"\" if userdata['msg_count'] == 0: return", "the address of the broker to connect to. Defaults to", "have been received, it disconnects cleanly from the broker and", "# and the Eclipse Distribution License is available at #", "authentication parameters for the client: auth = {'username':\"<username>\", 'password':\"<password>\"} Username", "containing the address of the broker to connect to. Defaults", "will should be used. auth : a dict containing authentication", "that TLS should not be used. transport : set to", "\"\"\" if msg_count < 1: raise ValueError('msg_count must be >", "try: will_payload = will['payload'] except KeyError: will_payload = None try:", "userdata['topics']: c.subscribe(t, userdata['qos']) else: c.subscribe(userdata['topics'], userdata['qos']) def _on_message_callback(c, userdata, message):", "to return a single message if msg_count == 1, or", "if type(userdata['topics']) is list: for t in userdata['topics']: c.subscribe(t, userdata['qos'])", "be returned. if msg_count > 1 then a list of", "to 1883. client_id : the MQTT client id to use.", "This function creates an MQTT client, connects to a broker", "be in the range 0-2') callback_userdata = { 'callback':callback, 'topics':topics,", "> 2: raise ValueError('qos must be in the range 0-2')", "in the range 0-2') callback_userdata = { 'callback':callback, 'topics':topics, 'qos':qos,", "helper functions to allow straightforward subscribing to topics and retrieving", "msg_count messages. This function creates an MQTT client, connects to", "list of topics to subscribe to. 
qos : the qos", "and returns the messages. topics : either a string containing", "userdata=callback_userdata, protocol=protocol, transport=transport) client.on_message = _on_message_callback client.on_connect = _on_connect if", "that with retained=False and msg_count=1, the function will return the", "client using the default behaviour - see the paho.mqtt.client documentation.", "False and message.retain == True: return userdata['msg_count'] = userdata['msg_count'] -", "client_id : the MQTT client id to use. If \"\"", "return a single message if msg_count == 1, or a", "and will default to None if not provided. Defaults to", "Defaults to None, which indicates that TLS should not be", "userdata, message): \"\"\"Internal callback\"\"\" if userdata['msg_count'] == 0: return #", "form \"on_message(client, userdata, message)\" for processing the messages received. topics", "topics and return msg_count messages. This function creates an MQTT", "== 1 then a single MQTTMessage will be returned. if", "Eclipse Distribution License is available at # http://www.eclipse.org/org/documents/edl-v10.php. # #", "Distribution License is available at # http://www.eclipse.org/org/documents/edl-v10.php. # # Contributors:", "not provided, which results in the client using the default", "a single message if msg_count == 1, or a list", "message): \"\"\"Internal callback\"\"\" userdata['callback'](c, userdata['userdata'], message) def _on_message_simple(c, userdata, message):", "2: raise ValueError('qos must be in the range 0-2') callback_userdata", "will['retain'] except KeyError: will_retain = False client.will_set(will_topic, will_payload, will_qos, will_retain)", "to a broker and subscribes to a list of topics.", "functions are simple(), which returns one or messages matching a", "string containing the address of the broker to connect to.", "tls : a dict containing TLS configuration parameters for the", "indicates that TLS should not be used. 
transport : set", "callback(_on_message_simple, topics, qos, userdata, hostname, port, client_id, keepalive, will, auth,", "or messages matching a set of topics, and callback() which", "0 try: will_retain = will['retain'] except KeyError: will_retain = False", "and will default to None, 0 and False respectively. Defaults", "else: messages = [] userdata = {'retained':retained, 'msg_count':msg_count, 'messages':messages} callback(_on_message_simple,", "userdata['qos']) def _on_message_callback(c, userdata, message): \"\"\"Internal callback\"\"\" userdata['callback'](c, userdata['userdata'], message)", "protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe to a list of topics and return", "keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe to a list", "topic to subscribe to, or a list of topics to", "retained : If set to True, retained messages will be", "seconds. will : a dict containing will parameters for the", "tls=None, protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe to a list of topics and", "client.on_connect = _on_connect if auth is not None: username =", "which indicates no authentication is to be used. tls :", "messages. topics : either a string containing a single topic", "string containing a single topic to subscribe to, or a", "port=1883, client_id=\"\", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe to", "a string containing the address of the broker to connect", "The two functions are simple(), which returns one or messages", "msg_count == 1 then a single MQTTMessage will be returned.", "automatically. keepalive : the keepalive timeout value for the client.", "def simple(topics, qos=0, msg_count=1, retained=True, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None,", "functions to allow straightforward subscribing to topics and retrieving messages.", "authentication is to be used. 
tls : a dict containing", "= will['payload'] except KeyError: will_payload = None try: will_qos =", "is not None: ca_certs = tls['ca_certs'] try: certfile = tls['certfile']", "use WebSockets as the transport. \"\"\" if msg_count < 1:", "Username is required, password is optional and will default to", "default to None if not provided, which results in the", "\"msg_count\" messages have been received, it disconnects cleanly from the", "userdata['messages'] is None and userdata['msg_count'] == 0: userdata['messages'] = message", "not have the retained flag set. hostname : a string", "to use. If \"\" or None, the Paho library will", "the default setting of transport which is raw TCP. Set", "it disconnects cleanly from the broker and returns the messages.", "used. transport : set to \"tcp\" to use the default", "return # Don't process stale retained messages if 'retained' was", "function will return the first message received that does not", "distribution. # # The Eclipse Public License is available at", "indicates no will should be used. auth : a dict", "and return msg_count messages. This function creates an MQTT client,", "returns the messages. topics : either a string containing a", "provides some helper functions to allow straightforward subscribing to topics", "{'retained':retained, 'msg_count':msg_count, 'messages':messages} callback(_on_message_simple, topics, qos, userdata, hostname, port, client_id,", "same as non-retained messages. If set to False, retained messages", "parameters for the client: auth = {'username':\"<username>\", 'password':\"<password>\"} Username is", "port, client_id, keepalive, will, auth, tls, protocol, transport) return userdata['messages']", "and Eclipse Distribution License v1.0 which accompany this distribution. #", "of messages. 
\"\"\" import paho.mqtt.client as paho import paho.mqtt as", "userdata : passed to the callback hostname : a string", "hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe", "userdata['messages'].append(message) if userdata['msg_count'] == 0: c.disconnect() def callback(callback, topics, qos=0,", "use when subscribing. This is applied to all topics. msg_count", "setting of transport which is raw TCP. Set to \"websockets\"", "the callback hostname : a string containing the address of", "'payload':\"<payload\">, 'qos':<qos>, 'retain':<retain>}. Topic is required, all other parameters are", "tls['keyfile'] except KeyError: keyfile = None try: tls_version = tls['tls_version']", "and the accompanying materials # are made available under the", "hostname, port, client_id, keepalive, will, auth, tls, protocol, transport) return", "which is raw TCP. Set to \"websockets\" to use WebSockets", "in a callback function. This function creates an MQTT client,", "Public License is available at # http://www.eclipse.org/legal/epl-v10.html # and the", "http://www.eclipse.org/legal/epl-v10.html # and the Eclipse Distribution License is available at", "provided callback. This is a blocking function and will never", "respectively. Defaults to None, which indicates no will should be", "on. Defaults to 1883. client_id : the MQTT client id", "will_topic = will['topic'] try: will_payload = will['payload'] except KeyError: will_payload", "= { 'callback':callback, 'topics':topics, 'qos':qos, 'userdata':userdata} client = paho.Client(client_id=client_id, userdata=callback_userdata,", "This program and the accompanying materials # are made available", "if msg_count == 1: messages = None else: messages =", "< 1: raise ValueError('msg_count must be > 0') # Set", "\"\"\"Subscribe to a list of topics and process them in", "<<EMAIL>> # # All rights reserved. 
This program and the", ": either a string containing a single topic to subscribe", "\"\"\"Internal callback\"\"\" userdata['callback'](c, userdata['userdata'], message) def _on_message_simple(c, userdata, message): \"\"\"Internal", "tls_version=tls_version, ciphers=ciphers) client.connect(hostname, port, keepalive) client.loop_forever() def simple(topics, qos=0, msg_count=1,", "will generate a client id automatically. keepalive : the keepalive", "keepalive timeout value for the client. Defaults to 60 seconds.", "except KeyError: will_payload = None try: will_qos = will['qos'] except", "Defaults to 1883. client_id : the MQTT client id to", "True, retained messages will be processed the same as non-retained", "if msg_count > 1 then a list of MQTTMessages will", "the user provided callback. This is a blocking function and", "for the client: will = {'topic': \"<topic>\", 'payload':\"<payload\">, 'qos':<qos>, 'retain':<retain>}.", "will be processed the same as non-retained messages. If set", "and userdata['msg_count'] == 0: userdata['messages'] = message c.disconnect() return userdata['messages'].append(message)", "subscribing. This is applied to all topics. msg_count : the", "try: password = auth['password'] except KeyError: password = <PASSWORD> client.username_pw_set(username,", "transport=\"tcp\"): \"\"\"Subscribe to a list of topics and return msg_count", "v1.0 # and Eclipse Distribution License v1.0 which accompany this", "is None and userdata['msg_count'] == 0: userdata['messages'] = message c.disconnect()", "{'topic': \"<topic>\", 'payload':\"<payload\">, 'qos':<qos>, 'retain':<retain>}. Topic is required, all other", "a list of topics. Incoming messages are processed by the", "to None if not provided. 
Defaults to None, which indicates", "c.subscribe(userdata['topics'], userdata['qos']) def _on_message_callback(c, userdata, message): \"\"\"Internal callback\"\"\" userdata['callback'](c, userdata['userdata'],", "matching a set of topics, and callback() which allows you", "no will should be used. auth : a dict containing", "to a list of topics. Once \"msg_count\" messages have been", "\"tcp\" to use the default setting of transport which is", "id automatically. keepalive : the keepalive timeout value for the", "process stale retained messages if 'retained' was false if userdata['retained']", "accompanying materials # are made available under the terms of", "not be used. transport : set to \"tcp\" to use", "the messages received. topics : either a string containing a", "= None else: messages = [] userdata = {'retained':retained, 'msg_count':msg_count,", "return userdata['msg_count'] = userdata['msg_count'] - 1 if userdata['messages'] is None", "- initial API and implementation \"\"\" This module provides some", "Topic is required, all other parameters are optional and will", "messages. The two functions are simple(), which returns one or", "transport. \"\"\" if msg_count < 1: raise ValueError('msg_count must be", "and will default to None if not provided, which results", "function creates an MQTT client, connects to a broker and", "Set to \"websockets\" to use WebSockets as the transport. \"\"\"", "list of topics. Once \"msg_count\" messages have been received, it", "callback\"\"\" userdata['callback'](c, userdata['userdata'], message) def _on_message_simple(c, userdata, message): \"\"\"Internal callback\"\"\"", "KeyError: will_payload = None try: will_qos = will['qos'] except KeyError:", "subscribing to topics and retrieving messages. The two functions are", "the qos to use when subscribing. 
This is applied to", "'topics':topics, 'qos':qos, 'userdata':userdata} client = paho.Client(client_id=client_id, userdata=callback_userdata, protocol=protocol, transport=transport) client.on_message", "not provided. Defaults to None, which indicates no authentication is", "userdata, message): \"\"\"Internal callback\"\"\" userdata['callback'](c, userdata['userdata'], message) def _on_message_simple(c, userdata,", "documentation. Defaults to None, which indicates that TLS should not", "and will never return. callback : function of the form", "then a single MQTTMessage will be returned. if msg_count >", "containing authentication parameters for the client: auth = {'username':\"<username>\", 'password':\"<password>\"}", "API and implementation \"\"\" This module provides some helper functions", "if msg_count == 1, or a list # if >", "\"\"\"Internal callback\"\"\" if rc != 0: raise mqtt.MQTTException(paho.connack_string(rc)) if type(userdata['topics'])", "the client: will = {'topic': \"<topic>\", 'payload':\"<payload\">, 'qos':<qos>, 'retain':<retain>}. Topic", "keyfile, tls_version=tls_version, ciphers=ciphers) client.connect(hostname, port, keepalive) client.loop_forever() def simple(topics, qos=0,", "the transport. \"\"\" if msg_count < 1: raise ValueError('msg_count must", "to a list of topics and process them in a", "to all topics. msg_count : the number of messages to", "certfile = None try: keyfile = tls['keyfile'] except KeyError: keyfile", "= None try: tls_version = tls['tls_version'] except KeyError: tls_version =", "a client id automatically. keepalive : the keepalive timeout value", "an MQTT client, connects to a broker and subscribes to", "if msg_count == 1 then a single MQTTMessage will be", "msg_count == 1, or a list # if > 1.", "callback for processing of messages. 
\"\"\" import paho.mqtt.client as paho", "userdata=None, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport=\"tcp\"):", "0: userdata['messages'] = message c.disconnect() return userdata['messages'].append(message) if userdata['msg_count'] ==", "a list of MQTTMessages will be returned. retained : If", "program and the accompanying materials # are made available under", "tls is not None: ca_certs = tls['ca_certs'] try: certfile =", "dict containing authentication parameters for the client: auth = {'username':\"<username>\",", "was false if userdata['retained'] == False and message.retain == True:", "callback() which allows you to pass a callback for processing", "messages = [] userdata = {'retained':retained, 'msg_count':msg_count, 'messages':messages} callback(_on_message_simple, topics,", "tls['certfile'] except KeyError: certfile = None try: keyfile = tls['keyfile']", "msg_count=1, retained=True, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311,", "Contributors: # <NAME> - initial API and implementation \"\"\" This", "== True: return userdata['msg_count'] = userdata['msg_count'] - 1 if userdata['messages']", "processed by the user provided callback. This is a blocking", "be > 0') # Set ourselves up to return a", "some helper functions to allow straightforward subscribing to topics and", "library will generate a client id automatically. keepalive : the", "that does not have the retained flag set. hostname :", "c.disconnect() return userdata['messages'].append(message) if userdata['msg_count'] == 0: c.disconnect() def callback(callback,", "message) def _on_message_simple(c, userdata, message): \"\"\"Internal callback\"\"\" if userdata['msg_count'] ==", "message): \"\"\"Internal callback\"\"\" if userdata['msg_count'] == 0: return # Don't", "will default to None, 0 and False respectively. 
Defaults to", "them in a callback function. This function creates an MQTT", "use WebSockets as the transport. \"\"\" if qos < 0", "is required, password is optional and will default to None", "the Paho library will generate a client id automatically. keepalive", "MQTT client, connects to a broker and subscribes to a", "client: will = {'topic': \"<topic>\", 'payload':\"<payload\">, 'qos':<qos>, 'retain':<retain>}. Topic is", "KeyError: certfile = None try: keyfile = tls['keyfile'] except KeyError:", "subscribes to a list of topics. Once \"msg_count\" messages have", "KeyError: tls_version = ssl.PROTOCOL_SSLv23; try: ciphers = tls['ciphers'] except KeyError:", "required, password is optional and will default to None if", "\"websockets\" to use WebSockets as the transport. \"\"\" if qos", "= ssl.PROTOCOL_SSLv23; try: ciphers = tls['ciphers'] except KeyError: ciphers =", "broker and subscribes to a list of topics. Incoming messages", "client id to use. If \"\" or None, the Paho", "default to None, 0 and False respectively. Defaults to None,", "the Eclipse Public License v1.0 # and Eclipse Distribution License", "topics, qos, userdata, hostname, port, client_id, keepalive, will, auth, tls,", "# The Eclipse Public License is available at # http://www.eclipse.org/legal/epl-v10.html", "set to \"tcp\" to use the default setting of transport", "== 0: return # Don't process stale retained messages if", "(c) 2016 <NAME> <<EMAIL>> # # All rights reserved. This", "subscribe to. qos : the qos to use when subscribing.", "containing will parameters for the client: will = {'topic': \"<topic>\",", "None: ca_certs = tls['ca_certs'] try: certfile = tls['certfile'] except KeyError:", "type(userdata['topics']) is list: for t in userdata['topics']: c.subscribe(t, userdata['qos']) else:", "This is applied to all topics. msg_count : the number", "non-retained messages. If set to False, retained messages will be", "return. 
callback : function of the form \"on_message(client, userdata, message)\"", "cleanly from the broker and returns the messages. topics :", "single MQTTMessage will be returned. if msg_count > 1 then", "= message c.disconnect() return userdata['messages'].append(message) if userdata['msg_count'] == 0: c.disconnect()", "is optional and will default to None if not provided.", "id to use. If \"\" or None, the Paho library", "qos=0, msg_count=1, retained=True, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None, tls=None,", "{'username':\"<username>\", 'password':\"<password>\"} Username is required, password is optional and will", "= paho.Client(client_id=client_id, userdata=callback_userdata, protocol=protocol, transport=transport) client.on_message = _on_message_callback client.on_connect =", "None: username = auth['username'] try: password = auth['password'] except KeyError:", "except KeyError: certfile = None try: keyfile = tls['keyfile'] except", "rc): \"\"\"Internal callback\"\"\" if rc != 0: raise mqtt.MQTTException(paho.connack_string(rc)) if", "received. topics : either a string containing a single topic", ": the MQTT client id to use. If \"\" or", "\"\" or None, the Paho library will generate a client", "to None, which indicates that TLS should not be used.", "msg_count=1, the function will return the first message received that", "2016 <NAME> <<EMAIL>> # # All rights reserved. This program", "qos > 2: raise ValueError('qos must be in the range", "to allow straightforward subscribing to topics and retrieving messages. The", "be used. auth : a dict containing authentication parameters for", "client.connect(hostname, port, keepalive) client.loop_forever() def simple(topics, qos=0, msg_count=1, retained=True, hostname=\"localhost\",", "will never return. callback : function of the form \"on_message(client,", "If set to False, retained messages will be ignored. 
This", "# <NAME> - initial API and implementation \"\"\" This module", "\"<topic>\", 'payload':\"<payload\">, 'qos':<qos>, 'retain':<retain>}. Topic is required, all other parameters", "License v1.0 which accompany this distribution. # # The Eclipse", "and message.retain == True: return userdata['msg_count'] = userdata['msg_count'] - 1", "This is a blocking function and will never return. callback", "be returned. retained : If set to True, retained messages", "to localhost. port : the port to connect to the", "for the client: auth = {'username':\"<username>\", 'password':\"<password>\"} Username is required,", "return userdata['messages'].append(message) if userdata['msg_count'] == 0: c.disconnect() def callback(callback, topics,", "default behaviour - see the paho.mqtt.client documentation. Defaults to None,", "is required, all other parameters are optional and will default", "topics and retrieving messages. The two functions are simple(), which", "\"\"\" if qos < 0 or qos > 2: raise", "= tls['tls_version'] except KeyError: tls_version = ssl.PROTOCOL_SSLv23; try: ciphers =", "required, all other parameters are optional and will default to", ": a dict containing authentication parameters for the client: auth", "= None try: keyfile = tls['keyfile'] except KeyError: keyfile =", "messages to retrieve from the broker. if msg_count == 1", "message)\" for processing the messages received. topics : either a", "by the user provided callback. This is a blocking function", "to use WebSockets as the transport. \"\"\" if qos <", "= None client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version, ciphers=ciphers) client.connect(hostname, port, keepalive)", "available at # http://www.eclipse.org/org/documents/edl-v10.php. # # Contributors: # <NAME> -", "certfile = tls['certfile'] except KeyError: certfile = None try: keyfile", "> 0') # Set ourselves up to return a single", "broker to connect to. Defaults to localhost. 
port : the", "the client: dict = {'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">} ca_certs", "userdata['callback'](c, userdata['userdata'], message) def _on_message_simple(c, userdata, message): \"\"\"Internal callback\"\"\" if", "are optional and will default to None if not provided,", "'password':\"<password>\"} Username is required, password is optional and will default", "retained messages will be processed the same as non-retained messages.", "# are made available under the terms of the Eclipse", "except KeyError: will_retain = False client.will_set(will_topic, will_payload, will_qos, will_retain) if", "to topics and retrieving messages. The two functions are simple(),", "of topics. Once \"msg_count\" messages have been received, it disconnects", "been received, it disconnects cleanly from the broker and returns", "will = {'topic': \"<topic>\", 'payload':\"<payload\">, 'qos':<qos>, 'retain':<retain>}. Topic is required,", "password = auth['password'] except KeyError: password = <PASSWORD> client.username_pw_set(username, password)", "msg_count : the number of messages to retrieve from the", "optional and will default to None, 0 and False respectively.", "connect to. Defaults to localhost. port : the port to", "other parameters are optional and will default to None if", "parameters for the client: dict = {'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\",", "None and userdata['msg_count'] == 0: userdata['messages'] = message c.disconnect() return", "the default behaviour - see the paho.mqtt.client documentation. 
Defaults to", "return the first message received that does not have the", "a dict containing authentication parameters for the client: auth =", ": If set to True, retained messages will be processed", "\"\"\" This module provides some helper functions to allow straightforward", "if not provided, which results in the client using the", "hostname : a string containing the address of the broker", "except KeyError: will_qos = 0 try: will_retain = will['retain'] except", "ciphers = None client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version, ciphers=ciphers) client.connect(hostname, port,", "containing a single topic to subscribe to, or a list", "received, it disconnects cleanly from the broker and returns the", "= _on_message_callback client.on_connect = _on_connect if auth is not None:", "optional and will default to None if not provided, which", "paho.Client(client_id=client_id, userdata=callback_userdata, protocol=protocol, transport=transport) client.on_message = _on_message_callback client.on_connect = _on_connect", "_on_connect if auth is not None: username = auth['username'] try:", "qos to use when subscribing. This is applied to all", "- see the paho.mqtt.client documentation. Defaults to None, which indicates", "tls['ca_certs'] try: certfile = tls['certfile'] except KeyError: certfile = None", "try: will_retain = will['retain'] except KeyError: will_retain = False client.will_set(will_topic,", "will return the first message received that does not have", "a dict containing will parameters for the client: will =", "will['topic'] try: will_payload = will['payload'] except KeyError: will_payload = None", "messages received. topics : either a string containing a single", "will_qos = will['qos'] except KeyError: will_qos = 0 try: will_retain", "or a list of topics to subscribe to. qos :", "which allows you to pass a callback for processing of", "is applied to all topics. 
msg_count : the number of", "for t in userdata['topics']: c.subscribe(t, userdata['qos']) else: c.subscribe(userdata['topics'], userdata['qos']) def", "will parameters for the client: will = {'topic': \"<topic>\", 'payload':\"<payload\">,", "broker on. Defaults to 1883. client_id : the MQTT client", "# All rights reserved. This program and the accompanying materials", "0-2') callback_userdata = { 'callback':callback, 'topics':topics, 'qos':qos, 'userdata':userdata} client =", "all other parameters are optional and will default to None,", "callback\"\"\" if rc != 0: raise mqtt.MQTTException(paho.connack_string(rc)) if type(userdata['topics']) is", "to. Defaults to localhost. port : the port to connect", "This is applied to all topics. userdata : passed to", "port : the port to connect to the broker on.", "is not None: will_topic = will['topic'] try: will_payload = will['payload']", "topics. Incoming messages are processed by the user provided callback.", "single topic to subscribe to, or a list of topics", "if userdata['retained'] == False and message.retain == True: return userdata['msg_count']", "MQTTMessages will be returned. retained : If set to True,", "of topics and return msg_count messages. This function creates an", "[] userdata = {'retained':retained, 'msg_count':msg_count, 'messages':messages} callback(_on_message_simple, topics, qos, userdata,", "with retained=False and msg_count=1, the function will return the first", "is list: for t in userdata['topics']: c.subscribe(t, userdata['qos']) else: c.subscribe(userdata['topics'],", "transport=\"tcp\"): \"\"\"Subscribe to a list of topics and process them", "subscribes to a list of topics. Incoming messages are processed", "broker and returns the messages. topics : either a string", "\"on_message(client, userdata, message)\" for processing the messages received. topics :", "to use WebSockets as the transport. \"\"\" if msg_count <", "Defaults to localhost. 
port : the port to connect to", "None client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version, ciphers=ciphers) client.connect(hostname, port, keepalive) client.loop_forever()", "'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">} ca_certs is required, all other parameters are optional", "== 0: c.disconnect() def callback(callback, topics, qos=0, userdata=None, hostname=\"localhost\", port=1883,", "broker. if msg_count == 1 then a single MQTTMessage will", "'ciphers':\"<ciphers\">} ca_certs is required, all other parameters are optional and", "the form \"on_message(client, userdata, message)\" for processing the messages received.", "from the broker and returns the messages. topics : either", "auth['password'] except KeyError: password = <PASSWORD> client.username_pw_set(username, password) if will", "module provides some helper functions to allow straightforward subscribing to", "the broker. if msg_count == 1 then a single MQTTMessage", "range 0-2') callback_userdata = { 'callback':callback, 'topics':topics, 'qos':qos, 'userdata':userdata} client", "not None: username = auth['username'] try: password = auth['password'] except", "tls['tls_version'] except KeyError: tls_version = ssl.PROTOCOL_SSLv23; try: ciphers = tls['ciphers']", "= {'topic': \"<topic>\", 'payload':\"<payload\">, 'qos':<qos>, 'retain':<retain>}. Topic is required, all", "1. if msg_count == 1: messages = None else: messages", "- 1 if userdata['messages'] is None and userdata['msg_count'] == 0:", "then a list of MQTTMessages will be returned. retained :", "reserved. 
This program and the accompanying materials # are made", "or qos > 2: raise ValueError('qos must be in the", "keyfile = tls['keyfile'] except KeyError: keyfile = None try: tls_version", "made available under the terms of the Eclipse Public License", "will_qos = 0 try: will_retain = will['retain'] except KeyError: will_retain", "This means that with retained=False and msg_count=1, the function will", "# Contributors: # <NAME> - initial API and implementation \"\"\"", "to None if not provided, which results in the client", "retrieve from the broker. if msg_count == 1 then a", "to 60 seconds. will : a dict containing will parameters", "client id automatically. keepalive : the keepalive timeout value for", "the accompanying materials # are made available under the terms", "messages. This function creates an MQTT client, connects to a", "topics, and callback() which allows you to pass a callback", "> 1 then a list of MQTTMessages will be returned.", "a string containing a single topic to subscribe to, or", "client.on_message = _on_message_callback client.on_connect = _on_connect if auth is not", "dict containing TLS configuration parameters for the client: dict =", "\"websockets\" to use WebSockets as the transport. \"\"\" if msg_count", "the MQTT client id to use. If \"\" or None,", "\"\"\"Subscribe to a list of topics and return msg_count messages.", "<PASSWORD> client.username_pw_set(username, password) if will is not None: will_topic =", "of the Eclipse Public License v1.0 # and Eclipse Distribution", "will be returned. if msg_count > 1 then a list", "import paho.mqtt.client as paho import paho.mqtt as mqtt import ssl", "WebSockets as the transport. \"\"\" if msg_count < 1: raise", "list of topics and return msg_count messages. This function creates", "userdata, flags, rc): \"\"\"Internal callback\"\"\" if rc != 0: raise", "will['payload'] except KeyError: will_payload = None try: will_qos = will['qos']", "this distribution. 
# # The Eclipse Public License is available", "of the form \"on_message(client, userdata, message)\" for processing the messages", "False respectively. Defaults to None, which indicates no will should", "to True, retained messages will be processed the same as", "the range 0-2') callback_userdata = { 'callback':callback, 'topics':topics, 'qos':qos, 'userdata':userdata}", "list: for t in userdata['topics']: c.subscribe(t, userdata['qos']) else: c.subscribe(userdata['topics'], userdata['qos'])", "auth=None, tls=None, protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe to a list of topics", "will default to None if not provided. Defaults to None,", "def _on_connect(c, userdata, flags, rc): \"\"\"Internal callback\"\"\" if rc !=", "of topics, and callback() which allows you to pass a", "flag set. hostname : a string containing the address of", "qos=0, userdata=None, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311,", "None, 0 and False respectively. Defaults to None, which indicates", "and False respectively. Defaults to None, which indicates no will", "transport which is raw TCP. Set to \"websockets\" to use", "to. qos : the qos to use when subscribing. This", "localhost. port : the port to connect to the broker", "tls['ciphers'] except KeyError: ciphers = None client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version,", "else: c.subscribe(userdata['topics'], userdata['qos']) def _on_message_callback(c, userdata, message): \"\"\"Internal callback\"\"\" userdata['callback'](c,", "behaviour - see the paho.mqtt.client documentation. Defaults to None, which", "client.username_pw_set(username, password) if will is not None: will_topic = will['topic']", "and process them in a callback function. This function creates", "processing of messages. 
\"\"\" import paho.mqtt.client as paho import paho.mqtt", "None: will_topic = will['topic'] try: will_payload = will['payload'] except KeyError:", "topics. Once \"msg_count\" messages have been received, it disconnects cleanly", "number of messages to retrieve from the broker. if msg_count", "MQTTMessage will be returned. if msg_count > 1 then a", "disconnects cleanly from the broker and returns the messages. topics", "the port to connect to the broker on. Defaults to", "= tls['keyfile'] except KeyError: keyfile = None try: tls_version =", "all other parameters are optional and will default to None", "to a list of topics and return msg_count messages. This", "be processed the same as non-retained messages. If set to", "indicates no authentication is to be used. tls : a", "applied to all topics. userdata : passed to the callback", "= will['topic'] try: will_payload = will['payload'] except KeyError: will_payload =", "{'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">} ca_certs is required, all other", "will_retain = False client.will_set(will_topic, will_payload, will_qos, will_retain) if tls is", "= will['retain'] except KeyError: will_retain = False client.will_set(will_topic, will_payload, will_qos,", "is not None: username = auth['username'] try: password = auth['password']", "if will is not None: will_topic = will['topic'] try: will_payload", ": the number of messages to retrieve from the broker.", "up to return a single message if msg_count == 1,", "username = auth['username'] try: password = auth['password'] except KeyError: password", "try: keyfile = tls['keyfile'] except KeyError: keyfile = None try:", "and subscribes to a list of topics. Once \"msg_count\" messages", "allow straightforward subscribing to topics and retrieving messages. The two", "== 1, or a list # if > 1. 
if", "client: dict = {'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">} ca_certs is", "'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">} ca_certs is required, all other parameters", "not None: will_topic = will['topic'] try: will_payload = will['payload'] except", "qos, userdata, hostname, port, client_id, keepalive, will, auth, tls, protocol,", "try: certfile = tls['certfile'] except KeyError: certfile = None try:", "All rights reserved. This program and the accompanying materials #", "initial API and implementation \"\"\" This module provides some helper", "<NAME> - initial API and implementation \"\"\" This module provides", "simple(), which returns one or messages matching a set of", "allows you to pass a callback for processing of messages.", "will default to None if not provided, which results in", "keepalive : the keepalive timeout value for the client. Defaults", "to subscribe to. qos : the qos to use when", "try: tls_version = tls['tls_version'] except KeyError: tls_version = ssl.PROTOCOL_SSLv23; try:", "to the broker on. Defaults to 1883. client_id : the", "results in the client using the default behaviour - see", "the number of messages to retrieve from the broker. if", "and the Eclipse Distribution License is available at # http://www.eclipse.org/org/documents/edl-v10.php.", "see the paho.mqtt.client documentation. Defaults to None, which indicates that", "to connect to the broker on. Defaults to 1883. client_id", "raw TCP. Set to \"websockets\" to use WebSockets as the", "the client using the default behaviour - see the paho.mqtt.client", "you to pass a callback for processing of messages. \"\"\"", "the client. Defaults to 60 seconds. will : a dict", "None, which indicates that TLS should not be used. transport", "a callback for processing of messages. 
\"\"\" import paho.mqtt.client as", "the Eclipse Distribution License is available at # http://www.eclipse.org/org/documents/edl-v10.php. #", "to a list of topics. Incoming messages are processed by", "the paho.mqtt.client documentation. Defaults to None, which indicates that TLS", "TCP. Set to \"websockets\" to use WebSockets as the transport.", "rights reserved. This program and the accompanying materials # are", "other parameters are optional and will default to None, 0", "v1.0 which accompany this distribution. # # The Eclipse Public", "will : a dict containing will parameters for the client:", "\"\"\"Internal callback\"\"\" if userdata['msg_count'] == 0: return # Don't process", "This module provides some helper functions to allow straightforward subscribing", "= {'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">} ca_certs is required, all", "0') # Set ourselves up to return a single message", "if 'retained' was false if userdata['retained'] == False and message.retain", "client.loop_forever() def simple(topics, qos=0, msg_count=1, retained=True, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60,", "materials # are made available under the terms of the", "will['qos'] except KeyError: will_qos = 0 try: will_retain = will['retain']", "auth['username'] try: password = auth['password'] except KeyError: password = <PASSWORD>", "msg_count > 1 then a list of MQTTMessages will be", "the messages. topics : either a string containing a single", "are simple(), which returns one or messages matching a set", "callback\"\"\" if userdata['msg_count'] == 0: return # Don't process stale", "KeyError: ciphers = None client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version, ciphers=ciphers) client.connect(hostname,", "# # All rights reserved. This program and the accompanying", "# if > 1. 
if msg_count == 1: messages =", "first message received that does not have the retained flag", "<NAME> <<EMAIL>> # # All rights reserved. This program and", "port, keepalive) client.loop_forever() def simple(topics, qos=0, msg_count=1, retained=True, hostname=\"localhost\", port=1883,", "implementation \"\"\" This module provides some helper functions to allow", "single message if msg_count == 1, or a list #", "'retain':<retain>}. Topic is required, all other parameters are optional and", "and retrieving messages. The two functions are simple(), which returns", "the same as non-retained messages. If set to False, retained", "Eclipse Public License v1.0 # and Eclipse Distribution License v1.0", "which indicates that TLS should not be used. transport :", "used. tls : a dict containing TLS configuration parameters for", "have the retained flag set. hostname : a string containing", "to subscribe to, or a list of topics to subscribe", "TLS configuration parameters for the client: dict = {'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\",", "callback : function of the form \"on_message(client, userdata, message)\" for", "import paho.mqtt as mqtt import ssl def _on_connect(c, userdata, flags,", "is a blocking function and will never return. callback :", "0: raise mqtt.MQTTException(paho.connack_string(rc)) if type(userdata['topics']) is list: for t in", "None if not provided, which results in the client using", "retained flag set. hostname : a string containing the address", "transport. \"\"\" if qos < 0 or qos > 2:", "a blocking function and will never return. callback : function", "never return. callback : function of the form \"on_message(client, userdata,", "= auth['password'] except KeyError: password = <PASSWORD> client.username_pw_set(username, password) if", "to all topics. userdata : passed to the callback hostname", "topics. 
userdata : passed to the callback hostname : a", "qos < 0 or qos > 2: raise ValueError('qos must", "except KeyError: password = <PASSWORD> client.username_pw_set(username, password) if will is", "except KeyError: tls_version = ssl.PROTOCOL_SSLv23; try: ciphers = tls['ciphers'] except", "a list of topics. Once \"msg_count\" messages have been received,", "{ 'callback':callback, 'topics':topics, 'qos':qos, 'userdata':userdata} client = paho.Client(client_id=client_id, userdata=callback_userdata, protocol=protocol,", "to None, which indicates no authentication is to be used.", "as paho import paho.mqtt as mqtt import ssl def _on_connect(c,", "for the client: dict = {'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">}", "use. If \"\" or None, the Paho library will generate", "Eclipse Distribution License v1.0 which accompany this distribution. # #", "will_payload = None try: will_qos = will['qos'] except KeyError: will_qos", "rc != 0: raise mqtt.MQTTException(paho.connack_string(rc)) if type(userdata['topics']) is list: for", "as mqtt import ssl def _on_connect(c, userdata, flags, rc): \"\"\"Internal", "containing TLS configuration parameters for the client: dict = {'ca_certs':\"<ca_certs>\",", "provided, which results in the client using the default behaviour", "a list # if > 1. if msg_count == 1:", "function of the form \"on_message(client, userdata, message)\" for processing the", "# Don't process stale retained messages if 'retained' was false", "a list of topics to subscribe to. qos : the", "# Set ourselves up to return a single message if", "and msg_count=1, the function will return the first message received", "or None, the Paho library will generate a client id", "to be used. 
tls : a dict containing TLS configuration", "one or messages matching a set of topics, and callback()", "= [] userdata = {'retained':retained, 'msg_count':msg_count, 'messages':messages} callback(_on_message_simple, topics, qos,", "retrieving messages. The two functions are simple(), which returns one", "qos : the qos to use when subscribing. This is", "default to None if not provided. Defaults to None, which", "tls_version = ssl.PROTOCOL_SSLv23; try: ciphers = tls['ciphers'] except KeyError: ciphers", "if auth is not None: username = auth['username'] try: password", "If \"\" or None, the Paho library will generate a", "and callback() which allows you to pass a callback for", "License is available at # http://www.eclipse.org/org/documents/edl-v10.php. # # Contributors: #", "should not be used. transport : set to \"tcp\" to", "'callback':callback, 'topics':topics, 'qos':qos, 'userdata':userdata} client = paho.Client(client_id=client_id, userdata=callback_userdata, protocol=protocol, transport=transport)", "KeyError: keyfile = None try: tls_version = tls['tls_version'] except KeyError:", "if tls is not None: ca_certs = tls['ca_certs'] try: certfile", "of topics to subscribe to. 
qos : the qos to", "Eclipse Public License is available at # http://www.eclipse.org/legal/epl-v10.html # and", "if rc != 0: raise mqtt.MQTTException(paho.connack_string(rc)) if type(userdata['topics']) is list:", "_on_message_callback(c, userdata, message): \"\"\"Internal callback\"\"\" userdata['callback'](c, userdata['userdata'], message) def _on_message_simple(c,", "tls_version = tls['tls_version'] except KeyError: tls_version = ssl.PROTOCOL_SSLv23; try: ciphers", "under the terms of the Eclipse Public License v1.0 #", "client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version, ciphers=ciphers) client.connect(hostname, port, keepalive) client.loop_forever() def", "will_retain = will['retain'] except KeyError: will_retain = False client.will_set(will_topic, will_payload,", "Distribution License v1.0 which accompany this distribution. # # The", "userdata, hostname, port, client_id, keepalive, will, auth, tls, protocol, transport)", "subscribing. This is applied to all topics. userdata : passed", "messages. If set to False, retained messages will be ignored.", "blocking function and will never return. callback : function of", "stale retained messages if 'retained' was false if userdata['retained'] ==", "Once \"msg_count\" messages have been received, it disconnects cleanly from", "when subscribing. This is applied to all topics. userdata :", ": function of the form \"on_message(client, userdata, message)\" for processing", "should be used. auth : a dict containing authentication parameters", "a list of topics and process them in a callback", "_on_message_callback client.on_connect = _on_connect if auth is not None: username", "messages have been received, it disconnects cleanly from the broker", "userdata['msg_count'] - 1 if userdata['messages'] is None and userdata['msg_count'] ==", "does not have the retained flag set. hostname : a", "list of topics. 
Incoming messages are processed by the user", "are made available under the terms of the Eclipse Public", "1 then a single MQTTMessage will be returned. if msg_count", "messages if 'retained' was false if userdata['retained'] == False and", "terms of the Eclipse Public License v1.0 # and Eclipse", "messages. \"\"\" import paho.mqtt.client as paho import paho.mqtt as mqtt", "the terms of the Eclipse Public License v1.0 # and", "Paho library will generate a client id automatically. keepalive :", "is raw TCP. Set to \"websockets\" to use WebSockets as", "connect to the broker on. Defaults to 1883. client_id :", "set of topics, and callback() which allows you to pass", "client_id=\"\", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe to a", "or a list # if > 1. if msg_count ==", "topics and process them in a callback function. This function", "passed to the callback hostname : a string containing the", "None, the Paho library will generate a client id automatically.", "the function will return the first message received that does", "is available at # http://www.eclipse.org/legal/epl-v10.html # and the Eclipse Distribution", "userdata['msg_count'] = userdata['msg_count'] - 1 if userdata['messages'] is None and", "= will['qos'] except KeyError: will_qos = 0 try: will_retain =", "to \"websockets\" to use WebSockets as the transport. \"\"\" if", "messages matching a set of topics, and callback() which allows", "None else: messages = [] userdata = {'retained':retained, 'msg_count':msg_count, 'messages':messages}", "c.disconnect() def callback(callback, topics, qos=0, userdata=None, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60,", ": the keepalive timeout value for the client. Defaults to", "raise ValueError('msg_count must be > 0') # Set ourselves up", ": the port to connect to the broker on. 
Defaults", "which returns one or messages matching a set of topics,", "of topics and process them in a callback function. This", "= False client.will_set(will_topic, will_payload, will_qos, will_retain) if tls is not", "client = paho.Client(client_id=client_id, userdata=callback_userdata, protocol=protocol, transport=transport) client.on_message = _on_message_callback client.on_connect", "transport : set to \"tcp\" to use the default setting", "retained messages will be ignored. This means that with retained=False", "as the transport. \"\"\" if msg_count < 1: raise ValueError('msg_count", "to retrieve from the broker. if msg_count == 1 then", "dict containing will parameters for the client: will = {'topic':", "optional and will default to None if not provided. Defaults", "is to be used. tls : a dict containing TLS", "will be ignored. This means that with retained=False and msg_count=1,", "pass a callback for processing of messages. \"\"\" import paho.mqtt.client", "paho.mqtt as mqtt import ssl def _on_connect(c, userdata, flags, rc):", ": a dict containing TLS configuration parameters for the client:", "msg_count < 1: raise ValueError('msg_count must be > 0') #", "for processing of messages. \"\"\" import paho.mqtt.client as paho import", "userdata['userdata'], message) def _on_message_simple(c, userdata, message): \"\"\"Internal callback\"\"\" if userdata['msg_count']", "if userdata['msg_count'] == 0: return # Don't process stale retained", "function. This function creates an MQTT client, connects to a", "'retained' was false if userdata['retained'] == False and message.retain ==", "Defaults to 60 seconds. will : a dict containing will", "password is optional and will default to None if not", "client, connects to a broker and subscribes to a list", "ca_certs = tls['ca_certs'] try: certfile = tls['certfile'] except KeyError: certfile", "applied to all topics. 
msg_count : the number of messages", "list of topics and process them in a callback function.", "a dict containing TLS configuration parameters for the client: dict", "mqtt import ssl def _on_connect(c, userdata, flags, rc): \"\"\"Internal callback\"\"\"", "and subscribes to a list of topics. Incoming messages are", "available at # http://www.eclipse.org/legal/epl-v10.html # and the Eclipse Distribution License", "configuration parameters for the client: dict = {'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\",", "and implementation \"\"\" This module provides some helper functions to", "raise ValueError('qos must be in the range 0-2') callback_userdata =", "to pass a callback for processing of messages. \"\"\" import", "None if not provided. Defaults to None, which indicates no", "simple(topics, qos=0, msg_count=1, retained=True, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None,", "userdata = {'retained':retained, 'msg_count':msg_count, 'messages':messages} callback(_on_message_simple, topics, qos, userdata, hostname,", "parameters are optional and will default to None, 0 and", "'qos':<qos>, 'retain':<retain>}. Topic is required, all other parameters are optional", "the client: auth = {'username':\"<username>\", 'password':\"<password>\"} Username is required, password", "1, or a list # if > 1. if msg_count", "certfile, keyfile, tls_version=tls_version, ciphers=ciphers) client.connect(hostname, port, keepalive) client.loop_forever() def simple(topics,", "processed the same as non-retained messages. 
If set to False,", "Public License v1.0 # and Eclipse Distribution License v1.0 which", "available under the terms of the Eclipse Public License v1.0", "will_payload = will['payload'] except KeyError: will_payload = None try: will_qos", "except KeyError: ciphers = None client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version, ciphers=ciphers)", "if userdata['msg_count'] == 0: c.disconnect() def callback(callback, topics, qos=0, userdata=None,", "in the client using the default behaviour - see the", "parameters for the client: will = {'topic': \"<topic>\", 'payload':\"<payload\">, 'qos':<qos>,", "returned. if msg_count > 1 then a list of MQTTMessages", "topics, qos=0, userdata=None, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None, tls=None,", "paho.mqtt.client documentation. Defaults to None, which indicates that TLS should", "retained=True, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport=\"tcp\"):", "to use when subscribing. This is applied to all topics.", "to None, which indicates no will should be used. auth", "= 0 try: will_retain = will['retain'] except KeyError: will_retain =", "are processed by the user provided callback. This is a", "ca_certs is required, all other parameters are optional and will", "retained=False and msg_count=1, the function will return the first message", "function and will never return. callback : function of the", "callback function. This function creates an MQTT client, connects to", "if userdata['messages'] is None and userdata['msg_count'] == 0: userdata['messages'] =", "None try: will_qos = will['qos'] except KeyError: will_qos = 0", "1883. client_id : the MQTT client id to use. If", "Defaults to None, which indicates no authentication is to be", "will be returned. retained : If set to True, retained", "60 seconds. 
will : a dict containing will parameters for", "1 if userdata['messages'] is None and userdata['msg_count'] == 0: userdata['messages']", "protocol=protocol, transport=transport) client.on_message = _on_message_callback client.on_connect = _on_connect if auth", "will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport=\"tcp\"): \"\"\"Subscribe to a list of", "the keepalive timeout value for the client. Defaults to 60", "as the transport. \"\"\" if qos < 0 or qos", "to the callback hostname : a string containing the address", "# # The Eclipse Public License is available at #", "dict = {'ca_certs':\"<ca_certs>\", 'certfile':\"<certfile>\", 'keyfile':\"<keyfile>\", 'tls_version':\"<tls_version>\", 'ciphers':\"<ciphers\">} ca_certs is required,", "which accompany this distribution. # # The Eclipse Public License", "a broker and subscribes to a list of topics. Incoming", "If set to True, retained messages will be processed the", "user provided callback. This is a blocking function and will", "!= 0: raise mqtt.MQTTException(paho.connack_string(rc)) if type(userdata['topics']) is list: for t", "ssl def _on_connect(c, userdata, flags, rc): \"\"\"Internal callback\"\"\" if rc", "client. Defaults to 60 seconds. will : a dict containing", "provided. Defaults to None, which indicates no authentication is to", "received that does not have the retained flag set. hostname", "of topics. Incoming messages are processed by the user provided", "\"\"\" import paho.mqtt.client as paho import paho.mqtt as mqtt import", "of the broker to connect to. Defaults to localhost. port", "at # http://www.eclipse.org/org/documents/edl-v10.php. # # Contributors: # <NAME> - initial", "subscribe to, or a list of topics to subscribe to.", "ignored. This means that with retained=False and msg_count=1, the function", "paho.mqtt.client as paho import paho.mqtt as mqtt import ssl def", "all topics. userdata : passed to the callback hostname :", "to None, 0 and False respectively. 
Defaults to None, which", "WebSockets as the transport. \"\"\" if qos < 0 or", "false if userdata['retained'] == False and message.retain == True: return", "process them in a callback function. This function creates an", "topics : either a string containing a single topic to", "will_retain) if tls is not None: ca_certs = tls['ca_certs'] try:", "None try: keyfile = tls['keyfile'] except KeyError: keyfile = None", ": a dict containing will parameters for the client: will", "callback. This is a blocking function and will never return.", "the transport. \"\"\" if qos < 0 or qos >", "of transport which is raw TCP. Set to \"websockets\" to", "try: will_qos = will['qos'] except KeyError: will_qos = 0 try:", "> 1. if msg_count == 1: messages = None else:", "Don't process stale retained messages if 'retained' was false if", "mqtt.MQTTException(paho.connack_string(rc)) if type(userdata['topics']) is list: for t in userdata['topics']: c.subscribe(t,", "keyfile = None try: tls_version = tls['tls_version'] except KeyError: tls_version", "port to connect to the broker on. Defaults to 1883.", "http://www.eclipse.org/org/documents/edl-v10.php. # # Contributors: # <NAME> - initial API and", "0: return # Don't process stale retained messages if 'retained'", "password = <PASSWORD> client.username_pw_set(username, password) if will is not None:", "# # Contributors: # <NAME> - initial API and implementation", "Incoming messages are processed by the user provided callback. This", "userdata['qos']) else: c.subscribe(userdata['topics'], userdata['qos']) def _on_message_callback(c, userdata, message): \"\"\"Internal callback\"\"\"", "a callback function. This function creates an MQTT client, connects", "to connect to. Defaults to localhost. port : the port", "= _on_connect if auth is not None: username = auth['username']", "the broker and returns the messages. topics : either a", "of messages to retrieve from the broker. 
if msg_count ==", "ValueError('msg_count must be > 0') # Set ourselves up to", "topics to subscribe to. qos : the qos to use", "which indicates no will should be used. auth : a", "straightforward subscribing to topics and retrieving messages. The two functions", "= userdata['msg_count'] - 1 if userdata['messages'] is None and userdata['msg_count']", "The Eclipse Public License is available at # http://www.eclipse.org/legal/epl-v10.html #", "# http://www.eclipse.org/legal/epl-v10.html # and the Eclipse Distribution License is available", ": the qos to use when subscribing. This is applied", "retained messages if 'retained' was false if userdata['retained'] == False", "a set of topics, and callback() which allows you to", "a single MQTTMessage will be returned. if msg_count > 1", "def callback(callback, topics, qos=0, userdata=None, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None,", "for processing the messages received. topics : either a string", "be used. transport : set to \"tcp\" to use the", "list # if > 1. if msg_count == 1: messages", "userdata['messages'] = message c.disconnect() return userdata['messages'].append(message) if userdata['msg_count'] == 0:", "if qos < 0 or qos > 2: raise ValueError('qos", "ssl.PROTOCOL_SSLv23; try: ciphers = tls['ciphers'] except KeyError: ciphers = None", "used. 
auth : a dict containing authentication parameters for the", "_on_connect(c, userdata, flags, rc): \"\"\"Internal callback\"\"\" if rc != 0:", "ciphers=ciphers) client.connect(hostname, port, keepalive) client.loop_forever() def simple(topics, qos=0, msg_count=1, retained=True,", "must be in the range 0-2') callback_userdata = { 'callback':callback,", "t in userdata['topics']: c.subscribe(t, userdata['qos']) else: c.subscribe(userdata['topics'], userdata['qos']) def _on_message_callback(c,", "KeyError: will_qos = 0 try: will_retain = will['retain'] except KeyError:", "<gh_stars>1-10 # Copyright (c) 2016 <NAME> <<EMAIL>> # # All", "# http://www.eclipse.org/org/documents/edl-v10.php. # # Contributors: # <NAME> - initial API", "client.will_set(will_topic, will_payload, will_qos, will_retain) if tls is not None: ca_certs", "KeyError: will_retain = False client.will_set(will_topic, will_payload, will_qos, will_retain) if tls", "auth = {'username':\"<username>\", 'password':\"<password>\"} Username is required, password is optional", "from the broker. if msg_count == 1 then a single", "1: raise ValueError('msg_count must be > 0') # Set ourselves", "set to True, retained messages will be processed the same", "userdata['msg_count'] == 0: c.disconnect() def callback(callback, topics, qos=0, userdata=None, hostname=\"localhost\",", "in userdata['topics']: c.subscribe(t, userdata['qos']) else: c.subscribe(userdata['topics'], userdata['qos']) def _on_message_callback(c, userdata,", "None, which indicates no will should be used. auth :", "all topics. msg_count : the number of messages to retrieve", "False, retained messages will be ignored. 
This means that with", "flags, rc): \"\"\"Internal callback\"\"\" if rc != 0: raise mqtt.MQTTException(paho.connack_string(rc))", "== 1: messages = None else: messages = [] userdata", "'qos':qos, 'userdata':userdata} client = paho.Client(client_id=client_id, userdata=callback_userdata, protocol=protocol, transport=transport) client.on_message =", "use when subscribing. This is applied to all topics. userdata", "keepalive) client.loop_forever() def simple(topics, qos=0, msg_count=1, retained=True, hostname=\"localhost\", port=1883, client_id=\"\",", "callback(callback, topics, qos=0, userdata=None, hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60, will=None, auth=None,", "= None try: will_qos = will['qos'] except KeyError: will_qos =", "message.retain == True: return userdata['msg_count'] = userdata['msg_count'] - 1 if", "topics. msg_count : the number of messages to retrieve from", "address of the broker to connect to. Defaults to localhost.", "ValueError('qos must be in the range 0-2') callback_userdata = {", "= tls['certfile'] except KeyError: certfile = None try: keyfile =", "userdata['retained'] == False and message.retain == True: return userdata['msg_count'] =", "msg_count == 1: messages = None else: messages = []", "messages will be ignored. This means that with retained=False and", "1: messages = None else: messages = [] userdata =", "either a string containing a single topic to subscribe to,", "as non-retained messages. If set to False, retained messages will", "= {'retained':retained, 'msg_count':msg_count, 'messages':messages} callback(_on_message_simple, topics, qos, userdata, hostname, port,", "License is available at # http://www.eclipse.org/legal/epl-v10.html # and the Eclipse", "return msg_count messages. This function creates an MQTT client, connects", "of MQTTMessages will be returned. retained : If set to", "auth : a dict containing authentication parameters for the client:", "set. 
hostname : a string containing the address of the", "will_qos, will_retain) if tls is not None: ca_certs = tls['ca_certs']", "client: auth = {'username':\"<username>\", 'password':\"<password>\"} Username is required, password is", "= tls['ca_certs'] try: certfile = tls['certfile'] except KeyError: certfile =" ]
[ "if scores else level def get_ports(component): return map(int, component.split(\"/\")) if", "recurse(component, seen, next_port, level): seen.add(component) c_a, c_b = get_ports(component) next_components", "0), scores[0][1] if scores else level def get_ports(component): return map(int,", "] for component in sample: a, b = map(int, component.split(\"/\"))", "def solution(): starting_components = d[0] best_scores = [] for component", "# '4/500' # ] for component in sample: a, b", "score, reclevel = recurse(next_component, seen.copy(), nxt_port, level + 1) scores.append((score,", "scores = sorted(scores, key=lambda x: (x[1], x[0]), reverse=True) print(component, level,", "return my_score + (scores[0][0] if scores else 0), scores[0][1] if", "= get_ports(component) nxt_port = n_a if n_b == 0 else", "(scores[0][0] if scores else 0), scores[0][1] if scores else level", "+ 1) scores.append((score, reclevel)) scores = sorted(scores, key=lambda x: (x[1],", "key=lambda x: (x[1], x[0]), reverse=True) print(component, level, scores) return my_score", "# '1/4', # '5/0', # '2/5', # '3/6', # '4/500'", "n_b = get_ports(component) nxt_port = n_a if n_b == 0", "d = defaultdict(set) # with open('aoc_day_24_sample.txt') as f: with open(\"aoc_day_24_input.txt\")", "n_a if n_b in (c_a, c_b) else n_b score, reclevel", "# '1/3', # '1/4', # '5/0', # '2/5', # '3/6',", "max(best_scores)) def recurse(component, seen, next_port, level): seen.add(component) c_a, c_b =", "seen.copy(), nxt_port, level + 1) scores.append((score, reclevel)) scores = sorted(scores,", "level + 1) scores.append((score, reclevel)) scores = sorted(scores, key=lambda x:", "if scores else 0), scores[0][1] if scores else level def", "<gh_stars>0 from collections import defaultdict def solution(): starting_components = d[0]", "= n_a if n_b in (c_a, c_b) else n_b score,", "sorted(scores, key=lambda x: (x[1], x[0]), reverse=True) print(component, level, scores) return", "'1/2', # '1/3', # '1/4', # '5/0', # '2/5', #", "[] 
for next_component in next_components: n_a, n_b = get_ports(next_component) nxt_port", "def recurse(component, seen, next_port, level): seen.add(component) c_a, c_b = get_ports(component)", "map(int, component.split(\"/\")) if __name__ == \"__main__\": d = defaultdict(set) #", "'1/4', # '5/0', # '2/5', # '3/6', # '4/500' #", "n_b in (c_a, c_b) else n_b score, reclevel = recurse(next_component,", "n_a if n_b == 0 else n_b best_scores.append(recurse(component, set(), nxt_port,", "if n_b == 0 else n_b best_scores.append(recurse(component, set(), nxt_port, 0))", "= recurse(next_component, seen.copy(), nxt_port, level + 1) scores.append((score, reclevel)) scores", "nxt_port, level + 1) scores.append((score, reclevel)) scores = sorted(scores, key=lambda", "1) scores.append((score, reclevel)) scores = sorted(scores, key=lambda x: (x[1], x[0]),", "level def get_ports(component): return map(int, component.split(\"/\")) if __name__ == \"__main__\":", "x: (x[1], x[0]), reverse=True) print(component, level, scores) return my_score +", "open(\"aoc_day_24_input.txt\") as f: sample = f.readlines() # sample = [", "next_components = d[next_port] - seen my_score = sum(get_ports(component)) scores =", "= get_ports(component) next_components = d[next_port] - seen my_score = sum(get_ports(component))", "= [ # '0/1', # '1/2', # '1/3', # '1/4',", "n_b = get_ports(next_component) nxt_port = n_a if n_b in (c_a,", "'5/0', # '2/5', # '3/6', # '4/500' # ] for", "next_component in next_components: n_a, n_b = get_ports(next_component) nxt_port = n_a", "as f: sample = f.readlines() # sample = [ #", "c_b = get_ports(component) next_components = d[next_port] - seen my_score =", "[] for component in starting_components: n_a, n_b = get_ports(component) nxt_port", "in sample: a, b = map(int, component.split(\"/\")) d[a].add(component) d[b].add(component) solution()", "in starting_components: n_a, n_b = get_ports(component) nxt_port = n_a if", "= sorted(scores, key=lambda x: (x[1], x[0]), 
reverse=True) print(component, level, scores)", "(x[1], x[0]), reverse=True) print(component, level, scores) return my_score + (scores[0][0]", "= [] for component in starting_components: n_a, n_b = get_ports(component)", "reverse=True) print(component, level, scores) return my_score + (scores[0][0] if scores", "level): seen.add(component) c_a, c_b = get_ports(component) next_components = d[next_port] -", "component.split(\"/\")) if __name__ == \"__main__\": d = defaultdict(set) # with", "sample = f.readlines() # sample = [ # '0/1', #", "collections import defaultdict def solution(): starting_components = d[0] best_scores =", "for component in starting_components: n_a, n_b = get_ports(component) nxt_port =", "(c_a, c_b) else n_b score, reclevel = recurse(next_component, seen.copy(), nxt_port,", "best_scores.append(recurse(component, set(), nxt_port, 0)) print(\"fuck\", max(best_scores)) def recurse(component, seen, next_port,", "# '3/6', # '4/500' # ] for component in sample:", "with open(\"aoc_day_24_input.txt\") as f: sample = f.readlines() # sample =", "my_score = sum(get_ports(component)) scores = [] for next_component in next_components:", "# ] for component in sample: a, b = map(int,", "= get_ports(next_component) nxt_port = n_a if n_b in (c_a, c_b)", "component in sample: a, b = map(int, component.split(\"/\")) d[a].add(component) d[b].add(component)", "seen.add(component) c_a, c_b = get_ports(component) next_components = d[next_port] - seen", "# '5/0', # '2/5', # '3/6', # '4/500' # ]", "# '0/1', # '1/2', # '1/3', # '1/4', # '5/0',", "from collections import defaultdict def solution(): starting_components = d[0] best_scores", "== 0 else n_b best_scores.append(recurse(component, set(), nxt_port, 0)) print(\"fuck\", max(best_scores))", "else n_b best_scores.append(recurse(component, set(), nxt_port, 0)) print(\"fuck\", max(best_scores)) def recurse(component,", "print(\"fuck\", max(best_scores)) def recurse(component, seen, next_port, level): 
seen.add(component) c_a, c_b", "scores.append((score, reclevel)) scores = sorted(scores, key=lambda x: (x[1], x[0]), reverse=True)", "= [] for next_component in next_components: n_a, n_b = get_ports(next_component)", "sum(get_ports(component)) scores = [] for next_component in next_components: n_a, n_b", "n_b == 0 else n_b best_scores.append(recurse(component, set(), nxt_port, 0)) print(\"fuck\",", "sample = [ # '0/1', # '1/2', # '1/3', #", "my_score + (scores[0][0] if scores else 0), scores[0][1] if scores", "return map(int, component.split(\"/\")) if __name__ == \"__main__\": d = defaultdict(set)", "# '2/5', # '3/6', # '4/500' # ] for component", "get_ports(component) nxt_port = n_a if n_b == 0 else n_b", "c_a, c_b = get_ports(component) next_components = d[next_port] - seen my_score", "level, scores) return my_score + (scores[0][0] if scores else 0),", "f: sample = f.readlines() # sample = [ # '0/1',", "d[0] best_scores = [] for component in starting_components: n_a, n_b", "in next_components: n_a, n_b = get_ports(next_component) nxt_port = n_a if", "'4/500' # ] for component in sample: a, b =", "solution(): starting_components = d[0] best_scores = [] for component in", "f.readlines() # sample = [ # '0/1', # '1/2', #", "if __name__ == \"__main__\": d = defaultdict(set) # with open('aoc_day_24_sample.txt')", "open('aoc_day_24_sample.txt') as f: with open(\"aoc_day_24_input.txt\") as f: sample = f.readlines()", "'2/5', # '3/6', # '4/500' # ] for component in", "nxt_port = n_a if n_b == 0 else n_b best_scores.append(recurse(component,", "next_port, level): seen.add(component) c_a, c_b = get_ports(component) next_components = d[next_port]", "for next_component in next_components: n_a, n_b = get_ports(next_component) nxt_port =", "else 0), scores[0][1] if scores else level def get_ports(component): return", "'0/1', # '1/2', # '1/3', # '1/4', # '5/0', #", "\"__main__\": d = defaultdict(set) # with open('aoc_day_24_sample.txt') as f: with", "get_ports(component) 
next_components = d[next_port] - seen my_score = sum(get_ports(component)) scores", "f: with open(\"aoc_day_24_input.txt\") as f: sample = f.readlines() # sample", "# sample = [ # '0/1', # '1/2', # '1/3',", "defaultdict def solution(): starting_components = d[0] best_scores = [] for", "next_components: n_a, n_b = get_ports(next_component) nxt_port = n_a if n_b", "# '1/2', # '1/3', # '1/4', # '5/0', # '2/5',", "scores) return my_score + (scores[0][0] if scores else 0), scores[0][1]", "def get_ports(component): return map(int, component.split(\"/\")) if __name__ == \"__main__\": d", "n_b best_scores.append(recurse(component, set(), nxt_port, 0)) print(\"fuck\", max(best_scores)) def recurse(component, seen,", "scores[0][1] if scores else level def get_ports(component): return map(int, component.split(\"/\"))", "starting_components: n_a, n_b = get_ports(component) nxt_port = n_a if n_b", "'3/6', # '4/500' # ] for component in sample: a,", "n_b score, reclevel = recurse(next_component, seen.copy(), nxt_port, level + 1)", "seen, next_port, level): seen.add(component) c_a, c_b = get_ports(component) next_components =", "reclevel)) scores = sorted(scores, key=lambda x: (x[1], x[0]), reverse=True) print(component,", "with open('aoc_day_24_sample.txt') as f: with open(\"aoc_day_24_input.txt\") as f: sample =", "scores = [] for next_component in next_components: n_a, n_b =", "- seen my_score = sum(get_ports(component)) scores = [] for next_component", "reclevel = recurse(next_component, seen.copy(), nxt_port, level + 1) scores.append((score, reclevel))", "0)) print(\"fuck\", max(best_scores)) def recurse(component, seen, next_port, level): seen.add(component) c_a,", "n_a, n_b = get_ports(component) nxt_port = n_a if n_b ==", "nxt_port, 0)) print(\"fuck\", max(best_scores)) def recurse(component, seen, next_port, level): seen.add(component)", "best_scores = [] for component in starting_components: n_a, n_b =", "= n_a if n_b == 0 else n_b best_scores.append(recurse(component, 
set(),", "print(component, level, scores) return my_score + (scores[0][0] if scores else", "set(), nxt_port, 0)) print(\"fuck\", max(best_scores)) def recurse(component, seen, next_port, level):", "scores else level def get_ports(component): return map(int, component.split(\"/\")) if __name__", "as f: with open(\"aoc_day_24_input.txt\") as f: sample = f.readlines() #", "= d[next_port] - seen my_score = sum(get_ports(component)) scores = []", "if n_b in (c_a, c_b) else n_b score, reclevel =", "== \"__main__\": d = defaultdict(set) # with open('aoc_day_24_sample.txt') as f:", "# with open('aoc_day_24_sample.txt') as f: with open(\"aoc_day_24_input.txt\") as f: sample", "component in starting_components: n_a, n_b = get_ports(component) nxt_port = n_a", "= d[0] best_scores = [] for component in starting_components: n_a,", "in (c_a, c_b) else n_b score, reclevel = recurse(next_component, seen.copy(),", "get_ports(next_component) nxt_port = n_a if n_b in (c_a, c_b) else", "'1/3', # '1/4', # '5/0', # '2/5', # '3/6', #", "import defaultdict def solution(): starting_components = d[0] best_scores = []", "= sum(get_ports(component)) scores = [] for next_component in next_components: n_a,", "= f.readlines() # sample = [ # '0/1', # '1/2',", "for component in sample: a, b = map(int, component.split(\"/\")) d[a].add(component)", "else n_b score, reclevel = recurse(next_component, seen.copy(), nxt_port, level +", "c_b) else n_b score, reclevel = recurse(next_component, seen.copy(), nxt_port, level", "x[0]), reverse=True) print(component, level, scores) return my_score + (scores[0][0] if", "starting_components = d[0] best_scores = [] for component in starting_components:", "nxt_port = n_a if n_b in (c_a, c_b) else n_b", "seen my_score = sum(get_ports(component)) scores = [] for next_component in", "__name__ == \"__main__\": d = defaultdict(set) # with open('aoc_day_24_sample.txt') as", "[ # '0/1', # '1/2', # '1/3', # '1/4', #", "d[next_port] - seen my_score = 
sum(get_ports(component)) scores = [] for", "defaultdict(set) # with open('aoc_day_24_sample.txt') as f: with open(\"aoc_day_24_input.txt\") as f:", "= defaultdict(set) # with open('aoc_day_24_sample.txt') as f: with open(\"aoc_day_24_input.txt\") as", "recurse(next_component, seen.copy(), nxt_port, level + 1) scores.append((score, reclevel)) scores =", "else level def get_ports(component): return map(int, component.split(\"/\")) if __name__ ==", "scores else 0), scores[0][1] if scores else level def get_ports(component):", "0 else n_b best_scores.append(recurse(component, set(), nxt_port, 0)) print(\"fuck\", max(best_scores)) def", "get_ports(component): return map(int, component.split(\"/\")) if __name__ == \"__main__\": d =", "n_a, n_b = get_ports(next_component) nxt_port = n_a if n_b in", "+ (scores[0][0] if scores else 0), scores[0][1] if scores else" ]
[ "numpy as np import network def main(): x = np.array([2,", "def main(): x = np.array([2, 3]) nw = network.NeuralNetwork() print(nw.feedforward(x))", "np import network def main(): x = np.array([2, 3]) nw", "= np.array([2, 3]) nw = network.NeuralNetwork() print(nw.feedforward(x)) if __name__ ==", "as np import network def main(): x = np.array([2, 3])", "x = np.array([2, 3]) nw = network.NeuralNetwork() print(nw.feedforward(x)) if __name__", "main(): x = np.array([2, 3]) nw = network.NeuralNetwork() print(nw.feedforward(x)) if", "3]) nw = network.NeuralNetwork() print(nw.feedforward(x)) if __name__ == \"__main__\": main()", "network def main(): x = np.array([2, 3]) nw = network.NeuralNetwork()", "import network def main(): x = np.array([2, 3]) nw =", "np.array([2, 3]) nw = network.NeuralNetwork() print(nw.feedforward(x)) if __name__ == \"__main__\":", "import numpy as np import network def main(): x =" ]
[ "[] y_fear = [] y_happy = [] y_sad = []", "def animate(i): graph_data = open('emotion.txt', 'r').read() lines = graph_data.split('\\n') xs", "#draw the predictions from real-time.py import matplotlib.pyplot as plt import", "[] y_neutral = [] for line in lines: if len(line)", "as plt import matplotlib.animation as animation from matplotlib import style", "<reponame>rmhsawyer/EC601-Final-Project-Mapping_User_Face_To_Emoji #draw the predictions from real-time.py import matplotlib.pyplot as plt", "for line in lines: if len(line) > 1: time, angry,", "sad, surprise, neutral = line.split(',') xs.append(time) y_angry.append(angry) y_fear.append(fear) y_happy.append(happy) y_sad.append(sad)", "> 1: time, angry, fear, happy, sad, surprise, neutral =", "line.split(',') xs.append(time) y_angry.append(angry) y_fear.append(fear) y_happy.append(happy) y_sad.append(sad) y_surprise.append(surprise) y_neutral.append(neutral) ax1.clear() ax1.plot(xs,", "y_angry = [] y_fear = [] y_happy = [] y_sad", "ax1.plot(xs, y_fear) ax1.plot(xs, y_happy) ax1.plot(xs, y_sad) ax1.plot(xs, y_surprise) ax1.plot(xs, y_neutral)", "ax1.clear() ax1.plot(xs, y_angry) ax1.plot(xs, y_fear) ax1.plot(xs, y_happy) ax1.plot(xs, y_sad) ax1.plot(xs,", "= open('emotion.txt', 'r').read() lines = graph_data.split('\\n') xs = [] y_angry", "= fig.add_subplot(1,1,1) def animate(i): graph_data = open('emotion.txt', 'r').read() lines =", "open('emotion.txt', 'r').read() lines = graph_data.split('\\n') xs = [] y_angry =", "[] y_surprise = [] y_neutral = [] for line in", "fig = plt.figure() ax1 = fig.add_subplot(1,1,1) def animate(i): graph_data =", "[] y_sad = [] y_surprise = [] y_neutral = []", "y_angry) ax1.plot(xs, y_fear) ax1.plot(xs, y_happy) ax1.plot(xs, y_sad) ax1.plot(xs, y_surprise) ax1.plot(xs,", "ax1.plot(xs, y_sad) ax1.plot(xs, y_surprise) ax1.plot(xs, y_neutral) ani = animation.FuncAnimation(fig, animate,", "y_sad.append(sad) y_surprise.append(surprise) y_neutral.append(neutral) ax1.clear() ax1.plot(xs, 
y_angry) ax1.plot(xs, y_fear) ax1.plot(xs, y_happy)", "fig.add_subplot(1,1,1) def animate(i): graph_data = open('emotion.txt', 'r').read() lines = graph_data.split('\\n')", "ax1.plot(xs, y_surprise) ax1.plot(xs, y_neutral) ani = animation.FuncAnimation(fig, animate, interval=1000) plt.show()", "matplotlib.animation as animation from matplotlib import style style.use('fivethirtyeight') fig =", "from real-time.py import matplotlib.pyplot as plt import matplotlib.animation as animation", "import matplotlib.animation as animation from matplotlib import style style.use('fivethirtyeight') fig", "animation from matplotlib import style style.use('fivethirtyeight') fig = plt.figure() ax1", "1: time, angry, fear, happy, sad, surprise, neutral = line.split(',')", "= [] y_angry = [] y_fear = [] y_happy =", "= [] y_fear = [] y_happy = [] y_sad =", "import matplotlib.pyplot as plt import matplotlib.animation as animation from matplotlib", "= [] for line in lines: if len(line) > 1:", "= line.split(',') xs.append(time) y_angry.append(angry) y_fear.append(fear) y_happy.append(happy) y_sad.append(sad) y_surprise.append(surprise) y_neutral.append(neutral) ax1.clear()", "time, angry, fear, happy, sad, surprise, neutral = line.split(',') xs.append(time)", "y_fear) ax1.plot(xs, y_happy) ax1.plot(xs, y_sad) ax1.plot(xs, y_surprise) ax1.plot(xs, y_neutral) ani", "xs.append(time) y_angry.append(angry) y_fear.append(fear) y_happy.append(happy) y_sad.append(sad) y_surprise.append(surprise) y_neutral.append(neutral) ax1.clear() ax1.plot(xs, y_angry)", "real-time.py import matplotlib.pyplot as plt import matplotlib.animation as animation from", "lines: if len(line) > 1: time, angry, fear, happy, sad,", "fear, happy, sad, surprise, neutral = line.split(',') xs.append(time) y_angry.append(angry) y_fear.append(fear)", "graph_data = open('emotion.txt', 'r').read() lines = graph_data.split('\\n') xs = []", "in lines: if len(line) > 1: time, angry, fear, happy,", "y_neutral = [] for line in lines: 
if len(line) >", "surprise, neutral = line.split(',') xs.append(time) y_angry.append(angry) y_fear.append(fear) y_happy.append(happy) y_sad.append(sad) y_surprise.append(surprise)", "= [] y_neutral = [] for line in lines: if", "plt import matplotlib.animation as animation from matplotlib import style style.use('fivethirtyeight')", "len(line) > 1: time, angry, fear, happy, sad, surprise, neutral", "matplotlib import style style.use('fivethirtyeight') fig = plt.figure() ax1 = fig.add_subplot(1,1,1)", "= graph_data.split('\\n') xs = [] y_angry = [] y_fear =", "xs = [] y_angry = [] y_fear = [] y_happy", "y_happy) ax1.plot(xs, y_sad) ax1.plot(xs, y_surprise) ax1.plot(xs, y_neutral) ani = animation.FuncAnimation(fig,", "animate(i): graph_data = open('emotion.txt', 'r').read() lines = graph_data.split('\\n') xs =", "y_neutral.append(neutral) ax1.clear() ax1.plot(xs, y_angry) ax1.plot(xs, y_fear) ax1.plot(xs, y_happy) ax1.plot(xs, y_sad)", "the predictions from real-time.py import matplotlib.pyplot as plt import matplotlib.animation", "happy, sad, surprise, neutral = line.split(',') xs.append(time) y_angry.append(angry) y_fear.append(fear) y_happy.append(happy)", "[] y_happy = [] y_sad = [] y_surprise = []", "= [] y_sad = [] y_surprise = [] y_neutral =", "y_angry.append(angry) y_fear.append(fear) y_happy.append(happy) y_sad.append(sad) y_surprise.append(surprise) y_neutral.append(neutral) ax1.clear() ax1.plot(xs, y_angry) ax1.plot(xs,", "import style style.use('fivethirtyeight') fig = plt.figure() ax1 = fig.add_subplot(1,1,1) def", "= [] y_surprise = [] y_neutral = [] for line", "y_sad = [] y_surprise = [] y_neutral = [] for", "lines = graph_data.split('\\n') xs = [] y_angry = [] y_fear", "ax1.plot(xs, y_happy) ax1.plot(xs, y_sad) ax1.plot(xs, y_surprise) ax1.plot(xs, y_neutral) ani =", "as animation from matplotlib import style style.use('fivethirtyeight') fig = plt.figure()", "plt.figure() ax1 = fig.add_subplot(1,1,1) def animate(i): graph_data = open('emotion.txt', 
'r').read()", "[] for line in lines: if len(line) > 1: time,", "predictions from real-time.py import matplotlib.pyplot as plt import matplotlib.animation as", "graph_data.split('\\n') xs = [] y_angry = [] y_fear = []", "matplotlib.pyplot as plt import matplotlib.animation as animation from matplotlib import", "y_fear = [] y_happy = [] y_sad = [] y_surprise", "ax1 = fig.add_subplot(1,1,1) def animate(i): graph_data = open('emotion.txt', 'r').read() lines", "neutral = line.split(',') xs.append(time) y_angry.append(angry) y_fear.append(fear) y_happy.append(happy) y_sad.append(sad) y_surprise.append(surprise) y_neutral.append(neutral)", "y_fear.append(fear) y_happy.append(happy) y_sad.append(sad) y_surprise.append(surprise) y_neutral.append(neutral) ax1.clear() ax1.plot(xs, y_angry) ax1.plot(xs, y_fear)", "y_surprise = [] y_neutral = [] for line in lines:", "y_sad) ax1.plot(xs, y_surprise) ax1.plot(xs, y_neutral) ani = animation.FuncAnimation(fig, animate, interval=1000)", "= plt.figure() ax1 = fig.add_subplot(1,1,1) def animate(i): graph_data = open('emotion.txt',", "[] y_angry = [] y_fear = [] y_happy = []", "y_happy = [] y_sad = [] y_surprise = [] y_neutral", "= [] y_happy = [] y_sad = [] y_surprise =", "angry, fear, happy, sad, surprise, neutral = line.split(',') xs.append(time) y_angry.append(angry)", "y_surprise.append(surprise) y_neutral.append(neutral) ax1.clear() ax1.plot(xs, y_angry) ax1.plot(xs, y_fear) ax1.plot(xs, y_happy) ax1.plot(xs,", "if len(line) > 1: time, angry, fear, happy, sad, surprise,", "style.use('fivethirtyeight') fig = plt.figure() ax1 = fig.add_subplot(1,1,1) def animate(i): graph_data", "y_happy.append(happy) y_sad.append(sad) y_surprise.append(surprise) y_neutral.append(neutral) ax1.clear() ax1.plot(xs, y_angry) ax1.plot(xs, y_fear) ax1.plot(xs,", "'r').read() lines = graph_data.split('\\n') xs = [] y_angry = []", "ax1.plot(xs, y_angry) ax1.plot(xs, y_fear) ax1.plot(xs, y_happy) ax1.plot(xs, y_sad) ax1.plot(xs, y_surprise)", "line in 
lines: if len(line) > 1: time, angry, fear,", "from matplotlib import style style.use('fivethirtyeight') fig = plt.figure() ax1 =", "style style.use('fivethirtyeight') fig = plt.figure() ax1 = fig.add_subplot(1,1,1) def animate(i):" ]
[ "= 0. steps = 0 while not done: action =", "expert rollouts') args = parser.parse_args() print('loading and building expert policy')", "import argparse parser = argparse.ArgumentParser() parser.add_argument('expert_policy_file', type=str) parser.add_argument('envname', type=str) parser.add_argument('--render',", "steps % 100 == 0: print(\"%i/%i\"%(steps, env.spec.timestep_limit)) if steps >=", "parser.add_argument('expert_policy_file', type=str) parser.add_argument('envname', type=str) parser.add_argument('--render', action='store_true') parser.add_argument('--num_rollouts', type=int, default=20, help='Number", "parser.add_argument('envname', type=str) parser.add_argument('--render', action='store_true') parser.add_argument('--num_rollouts', type=int, default=20, help='Number of expert", "print('mean return', np.mean(returns)) print('std of return', np.std(returns)) if __name__ ==", "Humanoid-v1 --render \\ --num_rollouts 20 \"\"\" import numpy as np", "= np.load(args.expert_policy_file) lin_policy = lin_policy[lin_policy.files[0]] M = lin_policy[0] # mean", "std of state vectors estimated online by ARS. mean =", "return', np.mean(returns)) print('std of return', np.std(returns)) if __name__ == '__main__':", "std = lin_policy[2] env = gym.make(args.envname) returns = [] observations", "in range(args.num_rollouts): print('iter', i) obs = env.reset() done = False", "import numpy as np import gym def main(): import argparse", "gym.make(args.envname) returns = [] observations = [] actions = []", "= env.reset() done = False totalr = 0. 
steps =", "returns = [] observations = [] actions = [] for", "20 \"\"\" import numpy as np import gym def main():", "\"\"\" import numpy as np import gym def main(): import", "argparse.ArgumentParser() parser.add_argument('expert_policy_file', type=str) parser.add_argument('envname', type=str) parser.add_argument('--render', action='store_true') parser.add_argument('--num_rollouts', type=int, default=20,", "totalr += r steps += 1 if args.render: env.render() if", ">= env.spec.timestep_limit: break returns.append(totalr) print('returns', returns) print('mean return', np.mean(returns)) print('std", "and std of state vectors estimated online by ARS. mean", "https://github.com/berkeleydeeprlcourse. Example usage: python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render \\ --num_rollouts", "M = lin_policy[0] # mean and std of state vectors", "of state vectors estimated online by ARS. mean = lin_policy[1]", "0. steps = 0 while not done: action = np.dot(M,", "of expert rollouts') args = parser.parse_args() print('loading and building expert", "= parser.parse_args() print('loading and building expert policy') lin_policy = np.load(args.expert_policy_file)", "= 0 while not done: action = np.dot(M, (obs -", "0: print(\"%i/%i\"%(steps, env.spec.timestep_limit)) if steps >= env.spec.timestep_limit: break returns.append(totalr) print('returns',", "from https://github.com/berkeleydeeprlcourse. 
Example usage: python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render \\", "+= 1 if args.render: env.render() if steps % 100 ==", "env.spec.timestep_limit: break returns.append(totalr) print('returns', returns) print('mean return', np.mean(returns)) print('std of", "- mean)/std) observations.append(obs) actions.append(action) obs, r, done, _ = env.step(action)", "import gym def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('expert_policy_file',", "main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('expert_policy_file', type=str) parser.add_argument('envname', type=str)", "env.render() if steps % 100 == 0: print(\"%i/%i\"%(steps, env.spec.timestep_limit)) if", "to load a policy and generate rollout data. Adapted from", "a policy and generate rollout data. Adapted from https://github.com/berkeleydeeprlcourse. Example", "= [] for i in range(args.num_rollouts): print('iter', i) obs =", "run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render \\ --num_rollouts 20 \"\"\" import numpy", "type=int, default=20, help='Number of expert rollouts') args = parser.parse_args() print('loading", "args = parser.parse_args() print('loading and building expert policy') lin_policy =", "= np.dot(M, (obs - mean)/std) observations.append(obs) actions.append(action) obs, r, done,", "argparse parser = argparse.ArgumentParser() parser.add_argument('expert_policy_file', type=str) parser.add_argument('envname', type=str) parser.add_argument('--render', action='store_true')", "ARS. mean = lin_policy[1] std = lin_policy[2] env = gym.make(args.envname)", "estimated online by ARS. mean = lin_policy[1] std = lin_policy[2]", "False totalr = 0. 
steps = 0 while not done:", "gym def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('expert_policy_file', type=str)", "print('loading and building expert policy') lin_policy = np.load(args.expert_policy_file) lin_policy =", "\\ --num_rollouts 20 \"\"\" import numpy as np import gym", "load a policy and generate rollout data. Adapted from https://github.com/berkeleydeeprlcourse.", "--num_rollouts 20 \"\"\" import numpy as np import gym def", "np.mean(returns)) print('std of return', np.std(returns)) if __name__ == '__main__': main()", "= lin_policy[0] # mean and std of state vectors estimated", "help='Number of expert rollouts') args = parser.parse_args() print('loading and building", "% 100 == 0: print(\"%i/%i\"%(steps, env.spec.timestep_limit)) if steps >= env.spec.timestep_limit:", "type=str) parser.add_argument('--render', action='store_true') parser.add_argument('--num_rollouts', type=int, default=20, help='Number of expert rollouts')", "done = False totalr = 0. steps = 0 while", "= argparse.ArgumentParser() parser.add_argument('expert_policy_file', type=str) parser.add_argument('envname', type=str) parser.add_argument('--render', action='store_true') parser.add_argument('--num_rollouts', type=int,", "print(\"%i/%i\"%(steps, env.spec.timestep_limit)) if steps >= env.spec.timestep_limit: break returns.append(totalr) print('returns', returns)", "lin_policy[lin_policy.files[0]] M = lin_policy[0] # mean and std of state", "= lin_policy[2] env = gym.make(args.envname) returns = [] observations =", "100 == 0: print(\"%i/%i\"%(steps, env.spec.timestep_limit)) if steps >= env.spec.timestep_limit: break", "args.render: env.render() if steps % 100 == 0: print(\"%i/%i\"%(steps, env.spec.timestep_limit))", "expert policy') lin_policy = np.load(args.expert_policy_file) lin_policy = lin_policy[lin_policy.files[0]] M =", "vectors estimated online by ARS. 
mean = lin_policy[1] std =", "<reponame>kirk86/ARS \"\"\" Code to load a policy and generate rollout", "type=str) parser.add_argument('envname', type=str) parser.add_argument('--render', action='store_true') parser.add_argument('--num_rollouts', type=int, default=20, help='Number of", "obs = env.reset() done = False totalr = 0. steps", "= lin_policy[1] std = lin_policy[2] env = gym.make(args.envname) returns =", "# mean and std of state vectors estimated online by", "--render \\ --num_rollouts 20 \"\"\" import numpy as np import", "lin_policy[2] env = gym.make(args.envname) returns = [] observations = []", "= env.step(action) totalr += r steps += 1 if args.render:", "parser = argparse.ArgumentParser() parser.add_argument('expert_policy_file', type=str) parser.add_argument('envname', type=str) parser.add_argument('--render', action='store_true') parser.add_argument('--num_rollouts',", "break returns.append(totalr) print('returns', returns) print('mean return', np.mean(returns)) print('std of return',", "and building expert policy') lin_policy = np.load(args.expert_policy_file) lin_policy = lin_policy[lin_policy.files[0]]", "(obs - mean)/std) observations.append(obs) actions.append(action) obs, r, done, _ =", "state vectors estimated online by ARS. mean = lin_policy[1] std", "= lin_policy[lin_policy.files[0]] M = lin_policy[0] # mean and std of", "done, _ = env.step(action) totalr += r steps += 1", "env = gym.make(args.envname) returns = [] observations = [] actions", "not done: action = np.dot(M, (obs - mean)/std) observations.append(obs) actions.append(action)", "== 0: print(\"%i/%i\"%(steps, env.spec.timestep_limit)) if steps >= env.spec.timestep_limit: break returns.append(totalr)", "Adapted from https://github.com/berkeleydeeprlcourse. 
Example usage: python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render", "np.load(args.expert_policy_file) lin_policy = lin_policy[lin_policy.files[0]] M = lin_policy[0] # mean and", "lin_policy = lin_policy[lin_policy.files[0]] M = lin_policy[0] # mean and std", "for i in range(args.num_rollouts): print('iter', i) obs = env.reset() done", "python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render \\ --num_rollouts 20 \"\"\" import", "generate rollout data. Adapted from https://github.com/berkeleydeeprlcourse. Example usage: python run_policy.py", "rollout data. Adapted from https://github.com/berkeleydeeprlcourse. Example usage: python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz", "as np import gym def main(): import argparse parser =", "= [] actions = [] for i in range(args.num_rollouts): print('iter',", "actions.append(action) obs, r, done, _ = env.step(action) totalr += r", "print('returns', returns) print('mean return', np.mean(returns)) print('std of return', np.std(returns)) if", "default=20, help='Number of expert rollouts') args = parser.parse_args() print('loading and", "lin_policy[0] # mean and std of state vectors estimated online", "policy and generate rollout data. Adapted from https://github.com/berkeleydeeprlcourse. 
Example usage:", "lin_policy[1] std = lin_policy[2] env = gym.make(args.envname) returns = []", "../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render \\ --num_rollouts 20 \"\"\" import numpy as", "+= r steps += 1 if args.render: env.render() if steps", "mean)/std) observations.append(obs) actions.append(action) obs, r, done, _ = env.step(action) totalr", "[] observations = [] actions = [] for i in", "steps += 1 if args.render: env.render() if steps % 100", "1 if args.render: env.render() if steps % 100 == 0:", "policy') lin_policy = np.load(args.expert_policy_file) lin_policy = lin_policy[lin_policy.files[0]] M = lin_policy[0]", "\"\"\" Code to load a policy and generate rollout data.", "lin_policy = np.load(args.expert_policy_file) lin_policy = lin_policy[lin_policy.files[0]] M = lin_policy[0] #", "env.reset() done = False totalr = 0. steps = 0", "parser.add_argument('--render', action='store_true') parser.add_argument('--num_rollouts', type=int, default=20, help='Number of expert rollouts') args", "steps = 0 while not done: action = np.dot(M, (obs", "print('iter', i) obs = env.reset() done = False totalr =", "mean = lin_policy[1] std = lin_policy[2] env = gym.make(args.envname) returns", "returns) print('mean return', np.mean(returns)) print('std of return', np.std(returns)) if __name__", "mean and std of state vectors estimated online by ARS.", "actions = [] for i in range(args.num_rollouts): print('iter', i) obs", "0 while not done: action = np.dot(M, (obs - mean)/std)", "obs, r, done, _ = env.step(action) totalr += r steps", "= gym.make(args.envname) returns = [] observations = [] actions =", "online by ARS. 
mean = lin_policy[1] std = lin_policy[2] env", "i in range(args.num_rollouts): print('iter', i) obs = env.reset() done =", "[] for i in range(args.num_rollouts): print('iter', i) obs = env.reset()", "= [] observations = [] actions = [] for i", "numpy as np import gym def main(): import argparse parser", "_ = env.step(action) totalr += r steps += 1 if", "steps >= env.spec.timestep_limit: break returns.append(totalr) print('returns', returns) print('mean return', np.mean(returns))", "Code to load a policy and generate rollout data. Adapted", "done: action = np.dot(M, (obs - mean)/std) observations.append(obs) actions.append(action) obs,", "env.spec.timestep_limit)) if steps >= env.spec.timestep_limit: break returns.append(totalr) print('returns', returns) print('mean", "returns.append(totalr) print('returns', returns) print('mean return', np.mean(returns)) print('std of return', np.std(returns))", "if args.render: env.render() if steps % 100 == 0: print(\"%i/%i\"%(steps,", "range(args.num_rollouts): print('iter', i) obs = env.reset() done = False totalr", "observations = [] actions = [] for i in range(args.num_rollouts):", "Example usage: python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render \\ --num_rollouts 20", "rollouts') args = parser.parse_args() print('loading and building expert policy') lin_policy", "[] actions = [] for i in range(args.num_rollouts): print('iter', i)", "data. Adapted from https://github.com/berkeleydeeprlcourse. 
Example usage: python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1", "i) obs = env.reset() done = False totalr = 0.", "action = np.dot(M, (obs - mean)/std) observations.append(obs) actions.append(action) obs, r,", "def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('expert_policy_file', type=str) parser.add_argument('envname',", "np.dot(M, (obs - mean)/std) observations.append(obs) actions.append(action) obs, r, done, _", "totalr = 0. steps = 0 while not done: action", "observations.append(obs) actions.append(action) obs, r, done, _ = env.step(action) totalr +=", "building expert policy') lin_policy = np.load(args.expert_policy_file) lin_policy = lin_policy[lin_policy.files[0]] M", "r steps += 1 if args.render: env.render() if steps %", "if steps >= env.spec.timestep_limit: break returns.append(totalr) print('returns', returns) print('mean return',", "by ARS. mean = lin_policy[1] std = lin_policy[2] env =", "env.step(action) totalr += r steps += 1 if args.render: env.render()", "and generate rollout data. Adapted from https://github.com/berkeleydeeprlcourse. 
Example usage: python", "while not done: action = np.dot(M, (obs - mean)/std) observations.append(obs)", "parser.parse_args() print('loading and building expert policy') lin_policy = np.load(args.expert_policy_file) lin_policy", "parser.add_argument('--num_rollouts', type=int, default=20, help='Number of expert rollouts') args = parser.parse_args()", "action='store_true') parser.add_argument('--num_rollouts', type=int, default=20, help='Number of expert rollouts') args =", "if steps % 100 == 0: print(\"%i/%i\"%(steps, env.spec.timestep_limit)) if steps", "np import gym def main(): import argparse parser = argparse.ArgumentParser()", "r, done, _ = env.step(action) totalr += r steps +=", "usage: python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render \\ --num_rollouts 20 \"\"\"", "= False totalr = 0. steps = 0 while not" ]
[ "* A_SC * R_body) # Build color lines to internal", "import norm class TisserandKind(Enum): \"\"\"All possible Tisserand kinds\"\"\" APSIS =", "desired kind lines if self.kind == TisserandKind.APSIS: # Generate apsis", "\"\"\"Generates Tisserand figures\"\"\" def __init__(self, kind=TisserandKind.APSIS, axes=None): \"\"\"Object initializer Parameters", "flyby speed alpha_lim : tuple Minimum and maximum flyby angles.", "the default plotting option \"\"\" # Solve Tisserand parameters RR_P,", "the figure \"\"\" # Asign Tisserand kind self.kind = kind", "poliastro.twobody.mean_elements import get_mean_elements from poliastro.util import norm class TisserandKind(Enum): \"\"\"All", "\"\"\"Plots body Tisserand for given amount of solutions within Vinf", "list Plotting lines for the Tisserand \"\"\" # Plot desired", "self.ax.set_yscale(\"log\") def _solve_tisserand( self, body, vinf_span, num_contours, alpha_lim=(0, np.pi), N=100", "EE, TT = self._solve_tisserand(body, vinf_span, num_contours) # Check if color", "# Force axes scale regarding Tisserand kind self.ax.set_xscale(\"log\") if self.kind", "alpha span vinf_array = np.linspace(vinf_span[0], vinf_span[-1], num_contours) alpha_array = np.linspace(alpha_lim[0],", ": TisserandKind Nature for the Tisserand axes : ~matplotlib.pyplot.axes Axes", "/ (2)) ** 2) # Compute main Tisserand variables RR_P", "axes # Force axes scale regarding Tisserand kind self.ax.set_xscale(\"log\") if", "= plt.subplots(1, 1) else: self.ax = axes # Force axes", "* np.pi * np.sqrt((A_SC * R_body) ** 3 / body.parent.k)", "angles. 
N : int Number of points for flyby angle.", "== TisserandKind.APSIS: # Generate apsis lines lines = self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU),", "body.parent.k) EE = -body.parent.k / (2 * A_SC * R_body)", "V_body # Construct the mesh for any configuration V_INF, ALPHA", "self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color) elif self.kind == TisserandKind.ENERGY: # Generate energy", "self.kind == TisserandKind.PERIOD: # Generate period lines lines = self.ax.plot(RR_P.to(u.AU),", "of a Mission to Enceladus\" by David <NAME>, section 3.6", "TisserandKind.ENERGY: # Generate energy lines lines = self.ax.plot( RR_P.to(u.AU), EE.to(u.km", "- 2 * V_INF * np.cos(ALPHA)) ECC_SC = np.sqrt(1 -", "elements Earth body_rv = get_mean_elements(body).to_vectors() R_body, V_body = norm(body_rv.r), norm(body_rv.v)", "V_INF, ALPHA = np.meshgrid(vinf_array, alpha_array) # Solving for non-dimensional a_sc", "Apsis tisserand is the default plotting option \"\"\" # Solve", "self.ax = axes # Force axes scale regarding Tisserand kind", ") # Check if color defined if not color: color", "tuple Minimum and maximum Vinf velocities num_contours : int Number", "return self.ax def plot(self, body, vinf_span, num_contours=10, color=None): \"\"\"Plots body", "Nature for the Tisserand axes : ~matplotlib.pyplot.axes Axes for the", "velocity line alpha_lim : tuple Minimum and maximum flyby angles", "kind self.kind = kind # Check if axis available if", "= get_mean_elements(body).to_vectors() R_body, V_body = norm(body_rv.r), norm(body_rv.v) # Generate non-dimensional", "astropy import units as u from matplotlib import pyplot as", "N) vinf_array /= V_body # Construct the mesh for any", "axes: _, self.ax = plt.subplots(1, 1) else: self.ax = axes", "within Vinf span Parameters ---------- body : ~poliastro.bodies.Body Body to", "import pyplot as plt from poliastro.plotting._base import BODY_COLORS from poliastro.twobody.mean_elements", "self.ax.set_xscale(\"log\") if self.kind == TisserandKind.APSIS: 
self.ax.set_yscale(\"log\") def _solve_tisserand( self, body,", "Tisserand kind self.kind = kind # Check if axis available", "RR_P, RR_A, EE, TT, color): \"\"\"Collect lines and append them", "body Tisserand line within flyby angle Parameters ---------- body :", "# Asign Tisserand kind self.kind = kind # Check if", "self.kind == TisserandKind.APSIS: self.ax.set_yscale(\"log\") def _solve_tisserand( self, body, vinf_span, num_contours,", "= self._solve_tisserand( body, vinf_span, num_contours=2, alpha_lim=alpha_lim ) # Check if", "and maximum Vinf velocities num_contours : int Number of points", "/= V_body # Construct the mesh for any configuration V_INF,", "ECC_SC = np.sqrt(1 - 1 / A_SC * ((3 -", "color defined if not color: color = BODY_COLORS[body.name] # Build", "np.pi * np.sqrt((A_SC * R_body) ** 3 / body.parent.k) EE", "data : list Array containing [RR_P, RR_A, EE, TT, color]", "= np.sqrt(1 - 1 / A_SC * ((3 - 1", "not color: color = BODY_COLORS[body.name] # Build canvas lines from", "* R_body * (1 - ECC_SC) RR_A = A_SC *", "+ ECC_SC) TT = 2 * np.pi * np.sqrt((A_SC *", "int Number of contour lines for flyby speed alpha_lim :", "body : ~poliastro.bodies.Body Body to be plotted Tisserand vinf_span :", "2 * V_INF * np.cos(ALPHA)) ECC_SC = np.sqrt(1 - 1", "Vinf velocity line alpha_lim : tuple Minimum and maximum flyby", "= A_SC * R_body * (1 + ECC_SC) TT =", "kind : TisserandKind Nature for the Tisserand axes : ~matplotlib.pyplot.axes", "Tisserand variables RR_P = A_SC * R_body * (1 -", "Build canvas lines from Tisserand parameters self._build_lines(RR_P, RR_A, EE, TT,", "Tisserand figures\"\"\" def __init__(self, kind=TisserandKind.APSIS, axes=None): \"\"\"Object initializer Parameters ----------", "available if not axes: _, self.ax = plt.subplots(1, 1) else:", "Asign Tisserand kind self.kind = kind # Check if axis", "internal data Parameters ---------- data : list Array containing [RR_P,", "Parameters ---------- body : ~poliastro.bodies.Body Body to be plotted 
Tisserand", "np.meshgrid(vinf_array, alpha_array) # Solving for non-dimensional a_sc and ecc_sc A_SC", "self.ax.plot( RR_P.to(u.AU), EE.to(u.km ** 2 / u.s ** 2), color=color", "num_contours=2, alpha_lim=alpha_lim ) # Check if color defined if not", "import BODY_COLORS from poliastro.twobody.mean_elements import get_mean_elements from poliastro.util import norm", "APSIS = \"apsis\" ENERGY = \"energy\" PERIOD = \"period\" class", "self.kind == TisserandKind.ENERGY: # Generate energy lines lines = self.ax.plot(", "for the Tisserand axes : ~matplotlib.pyplot.axes Axes for the figure", "non-dimensional a_sc and ecc_sc A_SC = 1 / np.abs(1 -", "to Enceladus\" by David <NAME>, section 3.6 \"\"\" # Generate", "one depicted in \"Preliminary Trajectory Design of a Mission to", "Generate non-dimensional velocity and alpha span vinf_array = np.linspace(vinf_span[0], vinf_span[-1],", "color=color ) elif self.kind == TisserandKind.PERIOD: # Generate period lines", "Generate period lines lines = self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color) return lines", "else: self.ax = axes # Force axes scale regarding Tisserand", "TT = 2 * np.pi * np.sqrt((A_SC * R_body) **", "and append them to internal data Parameters ---------- data :", "Tisserand kind self.ax.set_xscale(\"log\") if self.kind == TisserandKind.APSIS: self.ax.set_yscale(\"log\") def _solve_tisserand(", "----- The algorithm for generating Tisserand plots is the one", "vinf) # Solve Tisserand parameters RR_P, RR_A, EE, TT =", "vinf_array = np.linspace(vinf_span[0], vinf_span[-1], num_contours) alpha_array = np.linspace(alpha_lim[0], alpha_lim[-1], N)", "V_body = norm(body_rv.r), norm(body_rv.v) # Generate non-dimensional velocity and alpha", "2) # Compute main Tisserand variables RR_P = A_SC *", "= A_SC * R_body * (1 - ECC_SC) RR_A =", "- ECC_SC) RR_A = A_SC * R_body * (1 +", "u.s ** 2), color=color ) elif self.kind == TisserandKind.PERIOD: #", "Notes ----- The algorithm for generating Tisserand plots is the", "lines 
lines = self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color) return lines def plot_line(self,", "# Plot desired kind lines if self.kind == TisserandKind.APSIS: #", ": ~astropy.units.Quantity Desired Vinf for the flyby num_contours : int", "1 / A_SC * ((3 - 1 / A_SC -", "R_body * (1 - ECC_SC) RR_A = A_SC * R_body", "for the color lines Returns ------- self.ax: ~matplotlib.axes.Axes Apsis tisserand", "R_body) ** 3 / body.parent.k) EE = -body.parent.k / (2", "Vinf span Parameters ---------- body : ~poliastro.bodies.Body Body to be", "\"\"\"Object initializer Parameters ---------- kind : TisserandKind Nature for the", "the one depicted in \"Preliminary Trajectory Design of a Mission", "Returns ------- lines: list Plotting lines for the Tisserand \"\"\"", "mesh for any configuration V_INF, ALPHA = np.meshgrid(vinf_array, alpha_array) #", "vinf_array /= V_body # Construct the mesh for any configuration", "= BODY_COLORS[body.name] # Build canvas lines from Tisserand parameters self._build_lines(RR_P,", "np.cos(ALPHA)) ECC_SC = np.sqrt(1 - 1 / A_SC * ((3", "from poliastro.twobody.mean_elements import get_mean_elements from poliastro.util import norm class TisserandKind(Enum):", "RR_A, EE, TT = self._solve_tisserand(body, vinf_span, num_contours) # Check if", "into a tuple vinf_span = (vinf, vinf) # Solve Tisserand", "vinf_span, num_contours, alpha_lim=(0, np.pi), N=100 ): \"\"\"Solves all possible Tisserand", "= np.linspace(vinf_span[0], vinf_span[-1], num_contours) alpha_array = np.linspace(alpha_lim[0], alpha_lim[-1], N) vinf_array", "Earth body_rv = get_mean_elements(body).to_vectors() R_body, V_body = norm(body_rv.r), norm(body_rv.v) #", "points for flyby angle. Notes ----- The algorithm for generating", "num_contours, alpha_lim=(0, np.pi), N=100 ): \"\"\"Solves all possible Tisserand lines", "Number of points for flyby angle. 
Notes ----- The algorithm", "Axes for the figure \"\"\" # Asign Tisserand kind self.kind", "** 2) / (2)) ** 2) # Compute main Tisserand", "solutions within Vinf span Parameters ---------- body : ~poliastro.bodies.Body Body", "RR_A, EE, TT def _build_lines(self, RR_P, RR_A, EE, TT, color):", "Tisserand lines with a meshgrid workflow Parameters ---------- body :", "import Enum import numpy as np from astropy import units", "lines with a meshgrid workflow Parameters ---------- body : ~poliastro.bodies.Body", "Desired Vinf for the flyby num_contours : int Number of", "RR_A, EE, TT, color): \"\"\"Collect lines and append them to", "~poliastro.bodies.Body Body to be plotted Tisserand vinf_array : ~astropy.units.Quantity Desired", "\"\"\" # Generate mean orbital elements Earth body_rv = get_mean_elements(body).to_vectors()", "np.abs(1 - V_INF ** 2 - 2 * V_INF *", "lines = self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color) return lines def plot_line(self, body,", "representing for the color lines Returns ------- self.ax: ~matplotlib.axes.Axes Apsis", "flyby angles. 
N : int Number of points for flyby", "= np.linspace(alpha_lim[0], alpha_lim[-1], N) vinf_array /= V_body # Construct the", "Parameters ---------- data : list Array containing [RR_P, RR_A, EE,", "- V_INF ** 2) / (2)) ** 2) # Compute", "body Tisserand for given amount of solutions within Vinf span", "import numpy as np from astropy import units as u", "def plot(self, body, vinf_span, num_contours=10, color=None): \"\"\"Plots body Tisserand for", "self, body, vinf_span, num_contours, alpha_lim=(0, np.pi), N=100 ): \"\"\"Solves all", "Generate mean orbital elements Earth body_rv = get_mean_elements(body).to_vectors() R_body, V_body", "apsis lines lines = self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color) elif self.kind ==", "RR_A, EE, TT, color) return self.ax def plot(self, body, vinf_span,", "Tisserand parameters RR_P, RR_A, EE, TT = self._solve_tisserand( body, vinf_span,", "lines to internal canvas return RR_P, RR_A, EE, TT def", "alpha_lim : tuple Minimum and maximum flyby angles. 
N :", "~astropy.units.Quantity Vinf velocity line alpha_lim : tuple Minimum and maximum", "alpha_lim[-1], N) vinf_array /= V_body # Construct the mesh for", "color): \"\"\"Collect lines and append them to internal data Parameters", "3 / body.parent.k) EE = -body.parent.k / (2 * A_SC", "TT = self._solve_tisserand(body, vinf_span, num_contours) # Check if color defined", "EE, TT, color): \"\"\"Collect lines and append them to internal", ") elif self.kind == TisserandKind.PERIOD: # Generate period lines lines", "# Solve Tisserand parameters RR_P, RR_A, EE, TT = self._solve_tisserand(", "color=color) elif self.kind == TisserandKind.ENERGY: # Generate energy lines lines", "TT, color): \"\"\"Collect lines and append them to internal data", "Tisserand \"\"\" # Plot desired kind lines if self.kind ==", "velocities color : str String representing for the color lines", "~matplotlib.axes.Axes Apsis tisserand is the default plotting option \"\"\" #", "all possible Tisserand lines with a meshgrid workflow Parameters ----------", "# Generate energy lines lines = self.ax.plot( RR_P.to(u.AU), EE.to(u.km **", "EE, TT, color] Returns ------- lines: list Plotting lines for", "np.pi), color=None): \"\"\"Plots body Tisserand line within flyby angle Parameters", "norm class TisserandKind(Enum): \"\"\"All possible Tisserand kinds\"\"\" APSIS = \"apsis\"", "R_body) # Build color lines to internal canvas return RR_P,", "tuple Minimum and maximum flyby angles. 
N : int Number", "self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color) return lines def plot_line(self, body, vinf, alpha_lim=(0,", "plt.subplots(1, 1) else: self.ax = axes # Force axes scale", "int Number of points to iterate over previously defined velocities", "= axes # Force axes scale regarding Tisserand kind self.ax.set_xscale(\"log\")", "of solutions within Vinf span Parameters ---------- body : ~poliastro.bodies.Body", "Generates Tisserand plots \"\"\" from enum import Enum import numpy", "** 2), color=color ) elif self.kind == TisserandKind.PERIOD: # Generate", "figure \"\"\" # Asign Tisserand kind self.kind = kind #", ": ~matplotlib.pyplot.axes Axes for the figure \"\"\" # Asign Tisserand", "Check if axis available if not axes: _, self.ax =", "A_SC * R_body) # Build color lines to internal canvas", ": ~poliastro.bodies.Body Body to be plotted Tisserand vinf_array : ~astropy.units.Quantity", "reuse Tisserand solver, we transform input Vinf into a tuple", "over previously defined velocities color : str String representing for", "lines and append them to internal data Parameters ---------- data", "plotting option \"\"\" # HACK: to reuse Tisserand solver, we", "if axis available if not axes: _, self.ax = plt.subplots(1,", "for the Tisserand \"\"\" # Plot desired kind lines if", "plotted Tisserand vinf_span : tuple Minimum and maximum Vinf velocities", "Tisserand for given amount of solutions within Vinf span Parameters", "class TisserandPlotter: \"\"\"Generates Tisserand figures\"\"\" def __init__(self, kind=TisserandKind.APSIS, axes=None): \"\"\"Object", "\"\"\" # Solve Tisserand parameters RR_P, RR_A, EE, TT =", "lines: list Plotting lines for the Tisserand \"\"\" # Plot", "alpha_lim : tuple Minimum and maximum flyby angles color :", "------- lines: list Plotting lines for the Tisserand \"\"\" #", "_, self.ax = plt.subplots(1, 1) else: self.ax = axes #", "matplotlib import pyplot as plt from poliastro.plotting._base import BODY_COLORS from", "ECC_SC) 
RR_A = A_SC * R_body * (1 + ECC_SC)", "= self._solve_tisserand(body, vinf_span, num_contours) # Check if color defined if", "mean orbital elements Earth body_rv = get_mean_elements(body).to_vectors() R_body, V_body =", "def plot_line(self, body, vinf, alpha_lim=(0, np.pi), color=None): \"\"\"Plots body Tisserand", "vinf : ~astropy.units.Quantity Vinf velocity line alpha_lim : tuple Minimum", "poliastro.plotting._base import BODY_COLORS from poliastro.twobody.mean_elements import get_mean_elements from poliastro.util import", "def _build_lines(self, RR_P, RR_A, EE, TT, color): \"\"\"Collect lines and", "ecc_sc A_SC = 1 / np.abs(1 - V_INF ** 2", "maximum flyby angles. N : int Number of points for", "# Check if axis available if not axes: _, self.ax", "lines = self.ax.plot( RR_P.to(u.AU), EE.to(u.km ** 2 / u.s **", "# HACK: to reuse Tisserand solver, we transform input Vinf", "color: color = BODY_COLORS[body.name] # Build canvas lines from Tisserand", "* R_body * (1 + ECC_SC) TT = 2 *", "~matplotlib.pyplot.axes Axes for the figure \"\"\" # Asign Tisserand kind", "plot(self, body, vinf_span, num_contours=10, color=None): \"\"\"Plots body Tisserand for given", "self._solve_tisserand(body, vinf_span, num_contours) # Check if color defined if not", "* (1 - ECC_SC) RR_A = A_SC * R_body *", "Number of points to iterate over previously defined velocities color", "defined velocities color : str String representing for the color", "any configuration V_INF, ALPHA = np.meshgrid(vinf_array, alpha_array) # Solving for", "---------- body : ~poliastro.bodies.Body Body to be plotted Tisserand vinf", "---------- kind : TisserandKind Nature for the Tisserand axes :", "to be plotted Tisserand vinf_span : tuple Minimum and maximum", "if self.kind == TisserandKind.APSIS: self.ax.set_yscale(\"log\") def _solve_tisserand( self, body, vinf_span,", "axes scale regarding Tisserand kind self.ax.set_xscale(\"log\") if self.kind == TisserandKind.APSIS:", "vinf_span : tuple Minimum and maximum 
Vinf velocities num_contours :", "/ u.s ** 2), color=color ) elif self.kind == TisserandKind.PERIOD:", "self.ax def plot(self, body, vinf_span, num_contours=10, color=None): \"\"\"Plots body Tisserand", "------- self.ax: ~matplotlib.axes.Axes Apsis tisserand is the default plotting option", "import units as u from matplotlib import pyplot as plt", "# Generate mean orbital elements Earth body_rv = get_mean_elements(body).to_vectors() R_body,", "2 - 2 * V_INF * np.cos(ALPHA)) ECC_SC = np.sqrt(1", "2), color=color ) elif self.kind == TisserandKind.PERIOD: # Generate period", "self.kind = kind # Check if axis available if not", "Tisserand parameters RR_P, RR_A, EE, TT = self._solve_tisserand(body, vinf_span, num_contours)", "the mesh for any configuration V_INF, ALPHA = np.meshgrid(vinf_array, alpha_array)", "= norm(body_rv.r), norm(body_rv.v) # Generate non-dimensional velocity and alpha span", "input Vinf into a tuple vinf_span = (vinf, vinf) #", "_solve_tisserand( self, body, vinf_span, num_contours, alpha_lim=(0, np.pi), N=100 ): \"\"\"Solves", "norm(body_rv.v) # Generate non-dimensional velocity and alpha span vinf_array =", "plots \"\"\" from enum import Enum import numpy as np", "the flyby num_contours : int Number of contour lines for", ": int Number of contour lines for flyby speed alpha_lim", "if color defined if not color: color = BODY_COLORS[body.name] #", "get_mean_elements(body).to_vectors() R_body, V_body = norm(body_rv.r), norm(body_rv.v) # Generate non-dimensional velocity", "the Tisserand axes : ~matplotlib.pyplot.axes Axes for the figure \"\"\"", "axis available if not axes: _, self.ax = plt.subplots(1, 1)", "/ body.parent.k) EE = -body.parent.k / (2 * A_SC *", "ENERGY = \"energy\" PERIOD = \"period\" class TisserandPlotter: \"\"\"Generates Tisserand", "poliastro.util import norm class TisserandKind(Enum): \"\"\"All possible Tisserand kinds\"\"\" APSIS", "alpha_lim=alpha_lim ) # Check if color defined if not color:", "---------- body : 
~poliastro.bodies.Body Body to be plotted Tisserand vinf_span", "axes : ~matplotlib.pyplot.axes Axes for the figure \"\"\" # Asign", "np.sqrt((A_SC * R_body) ** 3 / body.parent.k) EE = -body.parent.k", "Body to be plotted Tisserand vinf_span : tuple Minimum and", "2 * np.pi * np.sqrt((A_SC * R_body) ** 3 /", "to internal data Parameters ---------- data : list Array containing", ": int Number of points for flyby angle. Notes -----", "initializer Parameters ---------- kind : TisserandKind Nature for the Tisserand", "---------- data : list Array containing [RR_P, RR_A, EE, TT,", "# Generate apsis lines lines = self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color) elif", "\"apsis\" ENERGY = \"energy\" PERIOD = \"period\" class TisserandPlotter: \"\"\"Generates", "a meshgrid workflow Parameters ---------- body : ~poliastro.bodies.Body Body to", ": ~poliastro.bodies.Body Body to be plotted Tisserand vinf_span : tuple", "str String representing for the color lines Returns ------- self.ax:", "flyby angle Parameters ---------- body : ~poliastro.bodies.Body Body to be", "for given amount of solutions within Vinf span Parameters ----------", "Tisserand plots \"\"\" from enum import Enum import numpy as", "Tisserand vinf_array : ~astropy.units.Quantity Desired Vinf for the flyby num_contours", "* V_INF * np.cos(ALPHA)) ECC_SC = np.sqrt(1 - 1 /", "numpy as np from astropy import units as u from", "# Check if color defined if not color: color =", "RR_P, RR_A, EE, TT = self._solve_tisserand(body, vinf_span, num_contours) # Check", "# Construct the mesh for any configuration V_INF, ALPHA =", "Design of a Mission to Enceladus\" by David <NAME>, section", "Vinf for the flyby num_contours : int Number of contour", "from poliastro.util import norm class TisserandKind(Enum): \"\"\"All possible Tisserand kinds\"\"\"", "vinf, alpha_lim=(0, np.pi), color=None): \"\"\"Plots body Tisserand line within flyby", "Parameters ---------- kind : TisserandKind Nature for the Tisserand axes", 
"Tisserand line within flyby angle Parameters ---------- body : ~poliastro.bodies.Body", "iterate over previously defined velocities color : str String representing", "Tisserand plots is the one depicted in \"Preliminary Trajectory Design", ": list Array containing [RR_P, RR_A, EE, TT, color] Returns", "(1 + ECC_SC) TT = 2 * np.pi * np.sqrt((A_SC", "== TisserandKind.ENERGY: # Generate energy lines lines = self.ax.plot( RR_P.to(u.AU),", "RR_A, EE, TT, color] Returns ------- lines: list Plotting lines", "flyby angle. Notes ----- The algorithm for generating Tisserand plots", "defined if not color: color = BODY_COLORS[body.name] # Build canvas", "alpha_array = np.linspace(alpha_lim[0], alpha_lim[-1], N) vinf_array /= V_body # Construct", "A_SC = 1 / np.abs(1 - V_INF ** 2 -", "Solving for non-dimensional a_sc and ecc_sc A_SC = 1 /", "(2 * A_SC * R_body) # Build color lines to", "within flyby angle Parameters ---------- body : ~poliastro.bodies.Body Body to", "possible Tisserand kinds\"\"\" APSIS = \"apsis\" ENERGY = \"energy\" PERIOD", "TisserandKind.APSIS: # Generate apsis lines lines = self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color)", "RR_P, RR_A, EE, TT = self._solve_tisserand( body, vinf_span, num_contours=2, alpha_lim=alpha_lim", "TisserandKind Nature for the Tisserand axes : ~matplotlib.pyplot.axes Axes for", "= kind # Check if axis available if not axes:", "from astropy import units as u from matplotlib import pyplot", "angle. 
Notes ----- The algorithm for generating Tisserand plots is", "color=color) return lines def plot_line(self, body, vinf, alpha_lim=(0, np.pi), color=None):", "Enum import numpy as np from astropy import units as", "# Compute main Tisserand variables RR_P = A_SC * R_body", "/ A_SC - V_INF ** 2) / (2)) ** 2)", "-body.parent.k / (2 * A_SC * R_body) # Build color", "= self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color) return lines def plot_line(self, body, vinf,", "be plotted Tisserand vinf_array : ~astropy.units.Quantity Desired Vinf for the", "RR_P.to(u.AU), color=color) elif self.kind == TisserandKind.ENERGY: # Generate energy lines", "figures\"\"\" def __init__(self, kind=TisserandKind.APSIS, axes=None): \"\"\"Object initializer Parameters ---------- kind", "color lines Returns ------- self.ax: ~matplotlib.axes.Axes Apsis tisserand is the", "# Solve Tisserand parameters RR_P, RR_A, EE, TT = self._solve_tisserand(body,", "Body to be plotted Tisserand vinf_array : ~astropy.units.Quantity Desired Vinf", "\"Preliminary Trajectory Design of a Mission to Enceladus\" by David", "canvas return RR_P, RR_A, EE, TT def _build_lines(self, RR_P, RR_A,", "alpha_lim=(0, np.pi), color=None): \"\"\"Plots body Tisserand line within flyby angle", "points to iterate over previously defined velocities color : str", "kind=TisserandKind.APSIS, axes=None): \"\"\"Object initializer Parameters ---------- kind : TisserandKind Nature", "BODY_COLORS from poliastro.twobody.mean_elements import get_mean_elements from poliastro.util import norm class", "String representing for the color lines Returns ------- self.ax: ~matplotlib.axes.Axes", ": int Number of points to iterate over previously defined", "- V_INF ** 2 - 2 * V_INF * np.cos(ALPHA))", "lines lines = self.ax.plot( RR_P.to(u.AU), EE.to(u.km ** 2 / u.s", "2) / (2)) ** 2) # Compute main Tisserand variables", "Enceladus\" by David <NAME>, section 3.6 \"\"\" # Generate mean", "Tisserand vinf_span : tuple Minimum and maximum Vinf 
velocities num_contours", "flyby angles color : str String representing for the color", "A_SC * ((3 - 1 / A_SC - V_INF **", "Generate energy lines lines = self.ax.plot( RR_P.to(u.AU), EE.to(u.km ** 2", "Construct the mesh for any configuration V_INF, ALPHA = np.meshgrid(vinf_array,", "np.linspace(alpha_lim[0], alpha_lim[-1], N) vinf_array /= V_body # Construct the mesh", "plots is the one depicted in \"Preliminary Trajectory Design of", "pyplot as plt from poliastro.plotting._base import BODY_COLORS from poliastro.twobody.mean_elements import", "self._build_lines(RR_P, RR_A, EE, TT, color) return self.ax def plot(self, body,", "Mission to Enceladus\" by David <NAME>, section 3.6 \"\"\" #", "for the flyby num_contours : int Number of contour lines", "---------- body : ~poliastro.bodies.Body Body to be plotted Tisserand vinf_array", ": ~astropy.units.Quantity Vinf velocity line alpha_lim : tuple Minimum and", "color = BODY_COLORS[body.name] # Build canvas lines from Tisserand parameters", "TisserandKind(Enum): \"\"\"All possible Tisserand kinds\"\"\" APSIS = \"apsis\" ENERGY =", "alpha_lim=(0, np.pi), N=100 ): \"\"\"Solves all possible Tisserand lines with", "EE.to(u.km ** 2 / u.s ** 2), color=color ) elif", "Vinf into a tuple vinf_span = (vinf, vinf) # Solve", "default plotting option \"\"\" # Solve Tisserand parameters RR_P, RR_A,", "RR_P, RR_A, EE, TT def _build_lines(self, RR_P, RR_A, EE, TT,", "lines lines = self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color) elif self.kind == TisserandKind.ENERGY:", "from matplotlib import pyplot as plt from poliastro.plotting._base import BODY_COLORS", "# Solving for non-dimensional a_sc and ecc_sc A_SC = 1", "for the figure \"\"\" # Asign Tisserand kind self.kind =", "<NAME>, section 3.6 \"\"\" # Generate mean orbital elements Earth", "of points to iterate over previously defined velocities color :", "V_INF ** 2) / (2)) ** 2) # Compute main", "and ecc_sc A_SC = 1 / np.abs(1 - V_INF **", "ECC_SC) TT = 2 * np.pi * 
np.sqrt((A_SC * R_body)", "(vinf, vinf) # Solve Tisserand parameters RR_P, RR_A, EE, TT", "# Generate period lines lines = self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color) return", "# Build color lines to internal canvas return RR_P, RR_A,", "with a meshgrid workflow Parameters ---------- body : ~poliastro.bodies.Body Body", "non-dimensional velocity and alpha span vinf_array = np.linspace(vinf_span[0], vinf_span[-1], num_contours)", "Check if color defined if not color: color = BODY_COLORS[body.name]", "main Tisserand variables RR_P = A_SC * R_body * (1", "section 3.6 \"\"\" # Generate mean orbital elements Earth body_rv", "elif self.kind == TisserandKind.PERIOD: # Generate period lines lines =", "is the default plotting option \"\"\" # Solve Tisserand parameters", "num_contours : int Number of contour lines for flyby speed", "from poliastro.plotting._base import BODY_COLORS from poliastro.twobody.mean_elements import get_mean_elements from poliastro.util", "append them to internal data Parameters ---------- data : list", "class TisserandKind(Enum): \"\"\"All possible Tisserand kinds\"\"\" APSIS = \"apsis\" ENERGY", "TT.to(u.year), color=color) return lines def plot_line(self, body, vinf, alpha_lim=(0, np.pi),", "tisserand is the default plotting option \"\"\" # HACK: to", "be plotted Tisserand vinf_span : tuple Minimum and maximum Vinf", "- 1 / A_SC - V_INF ** 2) / (2))", "get_mean_elements from poliastro.util import norm class TisserandKind(Enum): \"\"\"All possible Tisserand", "body, vinf_span, num_contours=10, color=None): \"\"\"Plots body Tisserand for given amount", "variables RR_P = A_SC * R_body * (1 - ECC_SC)", "HACK: to reuse Tisserand solver, we transform input Vinf into", "R_body * (1 + ECC_SC) TT = 2 * np.pi", "** 2) # Compute main Tisserand variables RR_P = A_SC", "\"\"\" Generates Tisserand plots \"\"\" from enum import Enum import", "list Array containing [RR_P, RR_A, EE, TT, color] Returns -------", "tisserand is the default plotting option 
\"\"\" # Solve Tisserand", "int Number of points for flyby angle. Notes ----- The", "np.sqrt(1 - 1 / A_SC * ((3 - 1 /", "the default plotting option \"\"\" # HACK: to reuse Tisserand", "maximum Vinf velocities num_contours : int Number of points to", "= np.meshgrid(vinf_array, alpha_array) # Solving for non-dimensional a_sc and ecc_sc", "Vinf velocities num_contours : int Number of points to iterate", "Minimum and maximum flyby angles. N : int Number of", "self.kind == TisserandKind.APSIS: # Generate apsis lines lines = self.ax.plot(RR_A.to(u.AU),", "for flyby speed alpha_lim : tuple Minimum and maximum flyby", "flyby num_contours : int Number of contour lines for flyby", "velocities num_contours : int Number of points to iterate over", "= 1 / np.abs(1 - V_INF ** 2 - 2", "be plotted Tisserand vinf : ~astropy.units.Quantity Vinf velocity line alpha_lim", "period lines lines = self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color) return lines def", "plt from poliastro.plotting._base import BODY_COLORS from poliastro.twobody.mean_elements import get_mean_elements from", "== TisserandKind.APSIS: self.ax.set_yscale(\"log\") def _solve_tisserand( self, body, vinf_span, num_contours, alpha_lim=(0,", "for non-dimensional a_sc and ecc_sc A_SC = 1 / np.abs(1", "amount of solutions within Vinf span Parameters ---------- body :", "Generate apsis lines lines = self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color) elif self.kind", "kind # Check if axis available if not axes: _,", "EE = -body.parent.k / (2 * A_SC * R_body) #", "3.6 \"\"\" # Generate mean orbital elements Earth body_rv =", "* (1 + ECC_SC) TT = 2 * np.pi *", "to reuse Tisserand solver, we transform input Vinf into a", "* ((3 - 1 / A_SC - V_INF ** 2)", "TisserandKind.PERIOD: # Generate period lines lines = self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color)", "we transform input Vinf into a tuple vinf_span = (vinf,", "color=None): \"\"\"Plots body Tisserand line within flyby angle Parameters ----------", 
"meshgrid workflow Parameters ---------- body : ~poliastro.bodies.Body Body to be", "a Mission to Enceladus\" by David <NAME>, section 3.6 \"\"\"", "Tisserand axes : ~matplotlib.pyplot.axes Axes for the figure \"\"\" #", "/ A_SC * ((3 - 1 / A_SC - V_INF", "norm(body_rv.r), norm(body_rv.v) # Generate non-dimensional velocity and alpha span vinf_array", "vinf_span, num_contours=10, color=None): \"\"\"Plots body Tisserand for given amount of", ": tuple Minimum and maximum flyby angles. N : int", "return RR_P, RR_A, EE, TT def _build_lines(self, RR_P, RR_A, EE,", "by David <NAME>, section 3.6 \"\"\" # Generate mean orbital", "(2)) ** 2) # Compute main Tisserand variables RR_P =", "possible Tisserand lines with a meshgrid workflow Parameters ---------- body", "if not axes: _, self.ax = plt.subplots(1, 1) else: self.ax", "parameters RR_P, RR_A, EE, TT = self._solve_tisserand( body, vinf_span, num_contours=2,", "\"period\" class TisserandPlotter: \"\"\"Generates Tisserand figures\"\"\" def __init__(self, kind=TisserandKind.APSIS, axes=None):", "self._solve_tisserand( body, vinf_span, num_contours=2, alpha_lim=alpha_lim ) # Check if color", "# Generate non-dimensional velocity and alpha span vinf_array = np.linspace(vinf_span[0],", "velocity and alpha span vinf_array = np.linspace(vinf_span[0], vinf_span[-1], num_contours) alpha_array", "= self.ax.plot( RR_P.to(u.AU), EE.to(u.km ** 2 / u.s ** 2),", "= (vinf, vinf) # Solve Tisserand parameters RR_P, RR_A, EE,", "= self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color) elif self.kind == TisserandKind.ENERGY: # Generate", "lines = self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color) elif self.kind == TisserandKind.ENERGY: #", "TisserandPlotter: \"\"\"Generates Tisserand figures\"\"\" def __init__(self, kind=TisserandKind.APSIS, axes=None): \"\"\"Object initializer", "1 / np.abs(1 - V_INF ** 2 - 2 *", "vinf_span, num_contours) # Check if color defined if not color:", "EE, TT, color) return self.ax def plot(self, body, 
vinf_span, num_contours=10,", "1) else: self.ax = axes # Force axes scale regarding", "data Parameters ---------- data : list Array containing [RR_P, RR_A,", "if self.kind == TisserandKind.APSIS: # Generate apsis lines lines =", "speed alpha_lim : tuple Minimum and maximum flyby angles. N", "Plotting lines for the Tisserand \"\"\" # Plot desired kind", "vinf_span = (vinf, vinf) # Solve Tisserand parameters RR_P, RR_A,", "* np.cos(ALPHA)) ECC_SC = np.sqrt(1 - 1 / A_SC *", "\"\"\"All possible Tisserand kinds\"\"\" APSIS = \"apsis\" ENERGY = \"energy\"", "* R_body) ** 3 / body.parent.k) EE = -body.parent.k /", "\"energy\" PERIOD = \"period\" class TisserandPlotter: \"\"\"Generates Tisserand figures\"\"\" def", "angle Parameters ---------- body : ~poliastro.bodies.Body Body to be plotted", "is the default plotting option \"\"\" # HACK: to reuse", "alpha_array) # Solving for non-dimensional a_sc and ecc_sc A_SC =", "~astropy.units.Quantity Desired Vinf for the flyby num_contours : int Number", "for any configuration V_INF, ALPHA = np.meshgrid(vinf_array, alpha_array) # Solving", "configuration V_INF, ALPHA = np.meshgrid(vinf_array, alpha_array) # Solving for non-dimensional", "A_SC * R_body * (1 + ECC_SC) TT = 2", "/ (2 * A_SC * R_body) # Build color lines", "Tisserand kinds\"\"\" APSIS = \"apsis\" ENERGY = \"energy\" PERIOD =", "= 2 * np.pi * np.sqrt((A_SC * R_body) ** 3", "from Tisserand parameters self._build_lines(RR_P, RR_A, EE, TT, color) return self.ax", "~poliastro.bodies.Body Body to be plotted Tisserand vinf : ~astropy.units.Quantity Vinf", "N=100 ): \"\"\"Solves all possible Tisserand lines with a meshgrid", "to internal canvas return RR_P, RR_A, EE, TT def _build_lines(self,", "TisserandKind.APSIS: self.ax.set_yscale(\"log\") def _solve_tisserand( self, body, vinf_span, num_contours, alpha_lim=(0, np.pi),", "and maximum flyby angles. 
N : int Number of points", "body, vinf_span, num_contours=2, alpha_lim=alpha_lim ) # Check if color defined", "RR_P.to(u.AU), EE.to(u.km ** 2 / u.s ** 2), color=color )", "color) return self.ax def plot(self, body, vinf_span, num_contours=10, color=None): \"\"\"Plots", "for generating Tisserand plots is the one depicted in \"Preliminary", "def _solve_tisserand( self, body, vinf_span, num_contours, alpha_lim=(0, np.pi), N=100 ):", "to be plotted Tisserand vinf : ~astropy.units.Quantity Vinf velocity line", "maximum flyby angles color : str String representing for the", "tuple vinf_span = (vinf, vinf) # Solve Tisserand parameters RR_P,", "Tisserand parameters self._build_lines(RR_P, RR_A, EE, TT, color) return self.ax def", "= \"period\" class TisserandPlotter: \"\"\"Generates Tisserand figures\"\"\" def __init__(self, kind=TisserandKind.APSIS,", "\"\"\"Collect lines and append them to internal data Parameters ----------", "Compute main Tisserand variables RR_P = A_SC * R_body *", "of points for flyby angle. 
Notes ----- The algorithm for", "\"\"\" from enum import Enum import numpy as np from", "A_SC - V_INF ** 2) / (2)) ** 2) #", "np.linspace(vinf_span[0], vinf_span[-1], num_contours) alpha_array = np.linspace(alpha_lim[0], alpha_lim[-1], N) vinf_array /=", "body : ~poliastro.bodies.Body Body to be plotted Tisserand vinf_array :", "__init__(self, kind=TisserandKind.APSIS, axes=None): \"\"\"Object initializer Parameters ---------- kind : TisserandKind", "- 1 / A_SC * ((3 - 1 / A_SC", "kind self.ax.set_xscale(\"log\") if self.kind == TisserandKind.APSIS: self.ax.set_yscale(\"log\") def _solve_tisserand( self,", "(1 - ECC_SC) RR_A = A_SC * R_body * (1", "line alpha_lim : tuple Minimum and maximum flyby angles color", "lines Returns ------- self.ax: ~matplotlib.axes.Axes Apsis tisserand is the default", "Tisserand solver, we transform input Vinf into a tuple vinf_span", "default plotting option \"\"\" # HACK: to reuse Tisserand solver,", "previously defined velocities color : str String representing for the", "plotting option \"\"\" # Solve Tisserand parameters RR_P, RR_A, EE,", "self.ax: ~matplotlib.axes.Axes Apsis tisserand is the default plotting option \"\"\"", ": tuple Minimum and maximum Vinf velocities num_contours : int", "Build color lines to internal canvas return RR_P, RR_A, EE,", "Minimum and maximum Vinf velocities num_contours : int Number of", "orbital elements Earth body_rv = get_mean_elements(body).to_vectors() R_body, V_body = norm(body_rv.r),", "internal canvas return RR_P, RR_A, EE, TT def _build_lines(self, RR_P,", "plot_line(self, body, vinf, alpha_lim=(0, np.pi), color=None): \"\"\"Plots body Tisserand line", "num_contours : int Number of points to iterate over previously", "color] Returns ------- lines: list Plotting lines for the Tisserand", "from enum import Enum import numpy as np from astropy", "= \"apsis\" ENERGY = \"energy\" PERIOD = \"period\" class TisserandPlotter:", "num_contours) # Check if color defined if not color: color", "((3 - 1 / 
A_SC - V_INF ** 2) /", "_build_lines(self, RR_P, RR_A, EE, TT, color): \"\"\"Collect lines and append", "RR_P = A_SC * R_body * (1 - ECC_SC) RR_A", "RR_A, EE, TT = self._solve_tisserand( body, vinf_span, num_contours=2, alpha_lim=alpha_lim )", "* R_body) # Build color lines to internal canvas return", "num_contours) alpha_array = np.linspace(alpha_lim[0], alpha_lim[-1], N) vinf_array /= V_body #", "\"\"\" # Plot desired kind lines if self.kind == TisserandKind.APSIS:", "option \"\"\" # Solve Tisserand parameters RR_P, RR_A, EE, TT", "body, vinf_span, num_contours, alpha_lim=(0, np.pi), N=100 ): \"\"\"Solves all possible", "Minimum and maximum flyby angles color : str String representing", "import get_mean_elements from poliastro.util import norm class TisserandKind(Enum): \"\"\"All possible", "\"\"\"Solves all possible Tisserand lines with a meshgrid workflow Parameters", "line within flyby angle Parameters ---------- body : ~poliastro.bodies.Body Body", "vinf_span, num_contours=2, alpha_lim=alpha_lim ) # Check if color defined if", "TT, color) return self.ax def plot(self, body, vinf_span, num_contours=10, color=None):", "depicted in \"Preliminary Trajectory Design of a Mission to Enceladus\"", "\"\"\"Plots body Tisserand line within flyby angle Parameters ---------- body", "RR_A = A_SC * R_body * (1 + ECC_SC) TT", "Array containing [RR_P, RR_A, EE, TT, color] Returns ------- lines:", "EE, TT def _build_lines(self, RR_P, RR_A, EE, TT, color): \"\"\"Collect", "PERIOD = \"period\" class TisserandPlotter: \"\"\"Generates Tisserand figures\"\"\" def __init__(self,", "u from matplotlib import pyplot as plt from poliastro.plotting._base import", "contour lines for flyby speed alpha_lim : tuple Minimum and", "generating Tisserand plots is the one depicted in \"Preliminary Trajectory", "** 3 / body.parent.k) EE = -body.parent.k / (2 *", "1 / A_SC - V_INF ** 2) / (2)) **", "A_SC * R_body * (1 - ECC_SC) RR_A = A_SC", "[RR_P, RR_A, EE, TT, color] Returns ------- lines: list 
Plotting", "the Tisserand \"\"\" # Plot desired kind lines if self.kind", "Plot desired kind lines if self.kind == TisserandKind.APSIS: # Generate", "num_contours=10, color=None): \"\"\"Plots body Tisserand for given amount of solutions", "): \"\"\"Solves all possible Tisserand lines with a meshgrid workflow", "vinf_array : ~astropy.units.Quantity Desired Vinf for the flyby num_contours :", "N : int Number of points for flyby angle. Notes", "Force axes scale regarding Tisserand kind self.ax.set_xscale(\"log\") if self.kind ==", "units as u from matplotlib import pyplot as plt from", "= -body.parent.k / (2 * A_SC * R_body) # Build", "R_body, V_body = norm(body_rv.r), norm(body_rv.v) # Generate non-dimensional velocity and", "transform input Vinf into a tuple vinf_span = (vinf, vinf)", "Tisserand vinf : ~astropy.units.Quantity Vinf velocity line alpha_lim : tuple", "body_rv = get_mean_elements(body).to_vectors() R_body, V_body = norm(body_rv.r), norm(body_rv.v) # Generate", "lines if self.kind == TisserandKind.APSIS: # Generate apsis lines lines", ": tuple Minimum and maximum flyby angles color : str", "option \"\"\" # HACK: to reuse Tisserand solver, we transform", "parameters RR_P, RR_A, EE, TT = self._solve_tisserand(body, vinf_span, num_contours) #", "span vinf_array = np.linspace(vinf_span[0], vinf_span[-1], num_contours) alpha_array = np.linspace(alpha_lim[0], alpha_lim[-1],", "canvas lines from Tisserand parameters self._build_lines(RR_P, RR_A, EE, TT, color)", "and alpha span vinf_array = np.linspace(vinf_span[0], vinf_span[-1], num_contours) alpha_array =", "lines for the Tisserand \"\"\" # Plot desired kind lines", "def __init__(self, kind=TisserandKind.APSIS, axes=None): \"\"\"Object initializer Parameters ---------- kind :", "elif self.kind == TisserandKind.ENERGY: # Generate energy lines lines =", "# Build canvas lines from Tisserand parameters self._build_lines(RR_P, RR_A, EE,", "as np from astropy import units as u from matplotlib", "The algorithm for 
generating Tisserand plots is the one depicted", "lines for flyby speed alpha_lim : tuple Minimum and maximum", "Apsis tisserand is the default plotting option \"\"\" # HACK:", "Solve Tisserand parameters RR_P, RR_A, EE, TT = self._solve_tisserand(body, vinf_span,", "np from astropy import units as u from matplotlib import", "for flyby angle. Notes ----- The algorithm for generating Tisserand", "return lines def plot_line(self, body, vinf, alpha_lim=(0, np.pi), color=None): \"\"\"Plots", "color : str String representing for the color lines Returns", "\"\"\" # Asign Tisserand kind self.kind = kind # Check", "self.ax = plt.subplots(1, 1) else: self.ax = axes # Force", "kinds\"\"\" APSIS = \"apsis\" ENERGY = \"energy\" PERIOD = \"period\"", "Trajectory Design of a Mission to Enceladus\" by David <NAME>,", "body, vinf, alpha_lim=(0, np.pi), color=None): \"\"\"Plots body Tisserand line within", "Solve Tisserand parameters RR_P, RR_A, EE, TT = self._solve_tisserand( body,", "vinf_span[-1], num_contours) alpha_array = np.linspace(alpha_lim[0], alpha_lim[-1], N) vinf_array /= V_body", "as u from matplotlib import pyplot as plt from poliastro.plotting._base", "angles color : str String representing for the color lines", "algorithm for generating Tisserand plots is the one depicted in", "ALPHA = np.meshgrid(vinf_array, alpha_array) # Solving for non-dimensional a_sc and", "V_INF * np.cos(ALPHA)) ECC_SC = np.sqrt(1 - 1 / A_SC", "** 2 - 2 * V_INF * np.cos(ALPHA)) ECC_SC =", "lines def plot_line(self, body, vinf, alpha_lim=(0, np.pi), color=None): \"\"\"Plots body", "TT def _build_lines(self, RR_P, RR_A, EE, TT, color): \"\"\"Collect lines", "if not color: color = BODY_COLORS[body.name] # Build canvas lines", "in \"Preliminary Trajectory Design of a Mission to Enceladus\" by", "body : ~poliastro.bodies.Body Body to be plotted Tisserand vinf :", "kind lines if self.kind == TisserandKind.APSIS: # Generate apsis lines", "regarding Tisserand kind self.ax.set_xscale(\"log\") if 
self.kind == TisserandKind.APSIS: self.ax.set_yscale(\"log\") def", "color=None): \"\"\"Plots body Tisserand for given amount of solutions within", "plotted Tisserand vinf_array : ~astropy.units.Quantity Desired Vinf for the flyby", "axes=None): \"\"\"Object initializer Parameters ---------- kind : TisserandKind Nature for", "energy lines lines = self.ax.plot( RR_P.to(u.AU), EE.to(u.km ** 2 /", "tuple Minimum and maximum flyby angles color : str String", "of contour lines for flyby speed alpha_lim : tuple Minimum", "David <NAME>, section 3.6 \"\"\" # Generate mean orbital elements", ": ~poliastro.bodies.Body Body to be plotted Tisserand vinf : ~astropy.units.Quantity", "Body to be plotted Tisserand vinf : ~astropy.units.Quantity Vinf velocity", "** 2 / u.s ** 2), color=color ) elif self.kind", "BODY_COLORS[body.name] # Build canvas lines from Tisserand parameters self._build_lines(RR_P, RR_A,", "a_sc and ecc_sc A_SC = 1 / np.abs(1 - V_INF", "* np.sqrt((A_SC * R_body) ** 3 / body.parent.k) EE =", "span Parameters ---------- body : ~poliastro.bodies.Body Body to be plotted", "as plt from poliastro.plotting._base import BODY_COLORS from poliastro.twobody.mean_elements import get_mean_elements", "TT = self._solve_tisserand( body, vinf_span, num_contours=2, alpha_lim=alpha_lim ) # Check", "Returns ------- self.ax: ~matplotlib.axes.Axes Apsis tisserand is the default plotting", "solver, we transform input Vinf into a tuple vinf_span =", "enum import Enum import numpy as np from astropy import", "/ np.abs(1 - V_INF ** 2 - 2 * V_INF", "V_INF ** 2 - 2 * V_INF * np.cos(ALPHA)) ECC_SC", "scale regarding Tisserand kind self.ax.set_xscale(\"log\") if self.kind == TisserandKind.APSIS: self.ax.set_yscale(\"log\")", "np.pi), N=100 ): \"\"\"Solves all possible Tisserand lines with a", "plotted Tisserand vinf : ~astropy.units.Quantity Vinf velocity line alpha_lim :", "== TisserandKind.PERIOD: # Generate period lines lines = self.ax.plot(RR_P.to(u.AU), TT.to(u.year),", "Number of 
contour lines for flyby speed alpha_lim : tuple", "TT, color] Returns ------- lines: list Plotting lines for the", "parameters self._build_lines(RR_P, RR_A, EE, TT, color) return self.ax def plot(self,", "the color lines Returns ------- self.ax: ~matplotlib.axes.Axes Apsis tisserand is", "color lines to internal canvas return RR_P, RR_A, EE, TT", "to iterate over previously defined velocities color : str String", ": str String representing for the color lines Returns -------", "EE, TT = self._solve_tisserand( body, vinf_span, num_contours=2, alpha_lim=alpha_lim ) #", "\"\"\" # HACK: to reuse Tisserand solver, we transform input", "to be plotted Tisserand vinf_array : ~astropy.units.Quantity Desired Vinf for", "2 / u.s ** 2), color=color ) elif self.kind ==", "lines from Tisserand parameters self._build_lines(RR_P, RR_A, EE, TT, color) return", "~poliastro.bodies.Body Body to be plotted Tisserand vinf_span : tuple Minimum", "containing [RR_P, RR_A, EE, TT, color] Returns ------- lines: list", "given amount of solutions within Vinf span Parameters ---------- body", "= \"energy\" PERIOD = \"period\" class TisserandPlotter: \"\"\"Generates Tisserand figures\"\"\"", "workflow Parameters ---------- body : ~poliastro.bodies.Body Body to be plotted", "a tuple vinf_span = (vinf, vinf) # Solve Tisserand parameters", "not axes: _, self.ax = plt.subplots(1, 1) else: self.ax =", "is the one depicted in \"Preliminary Trajectory Design of a", "and maximum flyby angles color : str String representing for", "them to internal data Parameters ---------- data : list Array" ]
[ "= RequestFactory() class Rack: rack_name = None tree = None", "sorted(self.systems, key=lambda k: k['system_slot']) try: self.ru = self.kv.keyvalue_set.get(key='rack_ru').value except: self.ru", "KeyValue as KeyValue from django.test.client import RequestFactory from api_v2.keyvalue_handler import", "(self.rack_name)) self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 24) self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv, 'ethernet',", "= None systems = [] ethernet_patch_panel_24 = [] ethernet_patch_panel_48 =", "h = KeyValueHandler() for s in self.system_list: request = factory.get('/api/v2/keyvalue/?keystore=%s'", "pdb h = KeyValueHandler() for s in self.system_list: request =", "self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 48) import pdb h = KeyValueHandler()", "type) if str(i.key) == match_string: ret.append(i.value) return ret def _get_system_ru(self,", "follow=True) tree = h.read(request) system_ru = self._get_system_ru(tree) system_image = self._get_system_image(tree)", "if str(i.key) == match_string: ret.append(i.value) return ret def _get_system_ru(self, tree):", "30 def _get_ethernet_patch_panels(self, tree, type, port_count): ret = [] for", "'system_ru' in i.split(':'): return tree[i] except: pass return 4 def", "self.kv.keyvalue_set.get(key='rack_ru').value except: self.ru = 42 try: self.width = self.kv.keyvalue_set.get(key='rack_width').value except:", "42 try: self.width = self.kv.keyvalue_set.get(key='rack_width').value except: self.width = 30 def", "in i.split(':'): return tree[i] except: pass return None def _get_system_slot(self,", "= None width = None systems = [] ethernet_patch_panel_24 =", "_get_system_slot(self, tree): for i in tree.iterkeys(): try: if 'system_slot' in", "None def _get_system_slot(self, tree): for i in tree.iterkeys(): try: if", "rack_name self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name) 
self.system_list = KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" % (self.rack_name)) self.ethernet_patch_panel_24", "\"system_id\":s.system.id, \"system_ru\":system_ru, \"system_image\":system_image, 'system_slot':system_slot, 'operating_system':str(s.system.operating_system), 'server_model': str(s.system.server_model), 'oob_ip': str(s.system.oob_ip), })", "ethernet_patch_panel_48 = [] def __init__(self, rack_name): self.systems = [] self.rack_name", "type, port_count): ret = [] for i in tree.keyvalue_set.all(): match_string", "= [] ethernet_patch_panel_48 = [] def __init__(self, rack_name): self.systems =", "k['system_slot']) try: self.ru = self.kv.keyvalue_set.get(key='rack_ru').value except: self.ru = 42 try:", "for s in self.system_list: request = factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname), follow=True)", "self.ru = 42 try: self.width = self.kv.keyvalue_set.get(key='rack_width').value except: self.width =", "factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname), follow=True) tree = h.read(request) system_ru = self._get_system_ru(tree)", "import KeyValueHandler import json factory = RequestFactory() class Rack: rack_name", "systems.models import KeyValue as KeyValue from django.test.client import RequestFactory from", "= Truth.objects.select_related('truth_key_value').get(name=self.rack_name) self.system_list = KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" % (self.rack_name)) self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv,", "tree): for i in tree.iterkeys(): try: if 'system_ru' in i.split(':'):", "= self.kv.keyvalue_set.get(key='rack_width').value except: self.width = 30 def _get_ethernet_patch_panels(self, tree, type,", "import KeyValue as TruthKeyValue, Truth from systems.models import KeyValue as", "= self._get_system_ru(tree) system_image = self._get_system_image(tree) system_slot = self._get_system_slot(tree) self.systems.append({ 
\"system_name\":s.system.hostname,", "'ethernet', 24) self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 48) import pdb h", "django.test.client import RequestFactory from api_v2.keyvalue_handler import KeyValueHandler import json factory", "}) self.systems = sorted(self.systems, key=lambda k: k['system_slot']) try: self.ru =", "for i in tree.iterkeys(): try: if 'system_image' in i.split(':'): return", "KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" % (self.rack_name)) self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 24) self.ethernet_patch_panel_48 =", "class Rack: rack_name = None tree = None kv =", "_get_system_image(self, tree): for i in tree.iterkeys(): try: if 'system_image' in", "truth.models import KeyValue as TruthKeyValue, Truth from systems.models import KeyValue", "= factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname), follow=True) tree = h.read(request) system_ru =", "return None def _get_system_slot(self, tree): for i in tree.iterkeys(): try:", "% (port_count, type) if str(i.key) == match_string: ret.append(i.value) return ret", "_get_system_ru(self, tree): for i in tree.iterkeys(): try: if 'system_ru' in", "from django.test.client import RequestFactory from api_v2.keyvalue_handler import KeyValueHandler import json", "= [] ethernet_patch_panel_24 = [] ethernet_patch_panel_48 = [] def __init__(self,", "ethernet_patch_panel_24 = [] ethernet_patch_panel_48 = [] def __init__(self, rack_name): self.systems", "'ethernet', 48) import pdb h = KeyValueHandler() for s in", "Truth from systems.models import KeyValue as KeyValue from django.test.client import", "s in self.system_list: request = factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname), follow=True) tree", "tree = h.read(request) system_ru = self._get_system_ru(tree) system_image = self._get_system_image(tree) system_slot", "try: if 'system_image' in i.split(':'): return tree[i] 
except: pass return", "Truth.objects.select_related('truth_key_value').get(name=self.rack_name) self.system_list = KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" % (self.rack_name)) self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv, 'ethernet',", "[] self.rack_name = rack_name self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name) self.system_list = KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\"", "'system_image' in i.split(':'): return tree[i] except: pass return None def", "tree = None kv = None ru = None width", "ret = [] for i in tree.keyvalue_set.all(): match_string = \"%i_port_%s_patch_panel\"", "i in tree.iterkeys(): try: if 'system_slot' in i.split(':'): return tree[i]", "tree.iterkeys(): try: if 'system_image' in i.split(':'): return tree[i] except: pass", "for i in tree.iterkeys(): try: if 'system_slot' in i.split(':'): return", "system_slot = self._get_system_slot(tree) self.systems.append({ \"system_name\":s.system.hostname, \"system_id\":s.system.id, \"system_ru\":system_ru, \"system_image\":system_image, 'system_slot':system_slot, 'operating_system':str(s.system.operating_system),", "str(s.system.server_model), 'oob_ip': str(s.system.oob_ip), }) self.systems = sorted(self.systems, key=lambda k: k['system_slot'])", "str(s.system.oob_ip), }) self.systems = sorted(self.systems, key=lambda k: k['system_slot']) try: self.ru", "__init__(self, rack_name): self.systems = [] self.rack_name = rack_name self.kv =", "from KeyValueTree import KeyValueTree from truth.models import KeyValue as TruthKeyValue,", "== match_string: ret.append(i.value) return ret def _get_system_ru(self, tree): for i", "self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 24) self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 48)", "self.systems = sorted(self.systems, key=lambda k: k['system_slot']) try: self.ru = 
self.kv.keyvalue_set.get(key='rack_ru').value", "k: k['system_slot']) try: self.ru = self.kv.keyvalue_set.get(key='rack_ru').value except: self.ru = 42", "= [] def __init__(self, rack_name): self.systems = [] self.rack_name =", "in tree.keyvalue_set.all(): match_string = \"%i_port_%s_patch_panel\" % (port_count, type) if str(i.key)", "self._get_system_ru(tree) system_image = self._get_system_image(tree) system_slot = self._get_system_slot(tree) self.systems.append({ \"system_name\":s.system.hostname, \"system_id\":s.system.id,", "in i.split(':'): return tree[i] except: pass return 4 def _get_system_image(self,", "= self._get_system_slot(tree) self.systems.append({ \"system_name\":s.system.hostname, \"system_id\":s.system.id, \"system_ru\":system_ru, \"system_image\":system_image, 'system_slot':system_slot, 'operating_system':str(s.system.operating_system), 'server_model':", "in tree.iterkeys(): try: if 'system_image' in i.split(':'): return tree[i] except:", "key=lambda k: k['system_slot']) try: self.ru = self.kv.keyvalue_set.get(key='rack_ru').value except: self.ru =", "\"system_name\":s.system.hostname, \"system_id\":s.system.id, \"system_ru\":system_ru, \"system_image\":system_image, 'system_slot':system_slot, 'operating_system':str(s.system.operating_system), 'server_model': str(s.system.server_model), 'oob_ip': str(s.system.oob_ip),", "'server_model': str(s.system.server_model), 'oob_ip': str(s.system.oob_ip), }) self.systems = sorted(self.systems, key=lambda k:", "= \"%i_port_%s_patch_panel\" % (port_count, type) if str(i.key) == match_string: ret.append(i.value)", "systems = [] ethernet_patch_panel_24 = [] ethernet_patch_panel_48 = [] def", "ret.append(i.value) return ret def _get_system_ru(self, tree): for i in tree.iterkeys():", "tree): for i in tree.iterkeys(): try: if 'system_image' in i.split(':'):", "self.ru = self.kv.keyvalue_set.get(key='rack_ru').value except: self.ru = 42 try: self.width =", "self.kv.keyvalue_set.get(key='rack_width').value except: 
self.width = 30 def _get_ethernet_patch_panels(self, tree, type, port_count):", "[] ethernet_patch_panel_24 = [] ethernet_patch_panel_48 = [] def __init__(self, rack_name):", "'system_slot':system_slot, 'operating_system':str(s.system.operating_system), 'server_model': str(s.system.server_model), 'oob_ip': str(s.system.oob_ip), }) self.systems = sorted(self.systems,", "self._get_ethernet_patch_panels(self.kv, 'ethernet', 48) import pdb h = KeyValueHandler() for s", "for i in tree.keyvalue_set.all(): match_string = \"%i_port_%s_patch_panel\" % (port_count, type)", "self.width = self.kv.keyvalue_set.get(key='rack_width').value except: self.width = 30 def _get_ethernet_patch_panels(self, tree,", "= None kv = None ru = None width =", "kv = None ru = None width = None systems", "\"%i_port_%s_patch_panel\" % (port_count, type) if str(i.key) == match_string: ret.append(i.value) return", "ret def _get_system_ru(self, tree): for i in tree.iterkeys(): try: if", "i.split(':'): return tree[i] except: pass return None def _get_system_slot(self, tree):", "i.split(':'): return tree[i] except: pass return 4 def _get_system_image(self, tree):", "tree, type, port_count): ret = [] for i in tree.keyvalue_set.all():", "self.width = 30 def _get_ethernet_patch_panels(self, tree, type, port_count): ret =", "self._get_system_image(tree) system_slot = self._get_system_slot(tree) self.systems.append({ \"system_name\":s.system.hostname, \"system_id\":s.system.id, \"system_ru\":system_ru, \"system_image\":system_image, 'system_slot':system_slot,", "tree.keyvalue_set.all(): match_string = \"%i_port_%s_patch_panel\" % (port_count, type) if str(i.key) ==", "4 def _get_system_image(self, tree): for i in tree.iterkeys(): try: if", "= rack_name self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name) self.system_list = KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" % (self.rack_name))", "KeyValueTree import KeyValueTree from truth.models 
import KeyValue as TruthKeyValue, Truth", "json factory = RequestFactory() class Rack: rack_name = None tree", "= None tree = None kv = None ru =", "in tree.iterkeys(): try: if 'system_slot' in i.split(':'): return tree[i] except:", "(port_count, type) if str(i.key) == match_string: ret.append(i.value) return ret def", "KeyValueTree from truth.models import KeyValue as TruthKeyValue, Truth from systems.models", "h.read(request) system_ru = self._get_system_ru(tree) system_image = self._get_system_image(tree) system_slot = self._get_system_slot(tree)", "= self.kv.keyvalue_set.get(key='rack_ru').value except: self.ru = 42 try: self.width = self.kv.keyvalue_set.get(key='rack_width').value", "try: if 'system_slot' in i.split(':'): return tree[i] except: pass return", "[] for i in tree.keyvalue_set.all(): match_string = \"%i_port_%s_patch_panel\" % (port_count,", "= None ru = None width = None systems =", "self.system_list: request = factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname), follow=True) tree = h.read(request)", "except: self.ru = 42 try: self.width = self.kv.keyvalue_set.get(key='rack_width').value except: self.width", "% (s.system.hostname), follow=True) tree = h.read(request) system_ru = self._get_system_ru(tree) system_image", "return tree[i] except: pass return 4 def _get_system_image(self, tree): for", "= KeyValueHandler() for s in self.system_list: request = factory.get('/api/v2/keyvalue/?keystore=%s' %", "\"system_image\":system_image, 'system_slot':system_slot, 'operating_system':str(s.system.operating_system), 'server_model': str(s.system.server_model), 'oob_ip': str(s.system.oob_ip), }) self.systems =", "self._get_ethernet_patch_panels(self.kv, 'ethernet', 24) self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 48) import pdb", "tree[i] except: pass return 4 def _get_system_image(self, tree): for i", "tree): for i in tree.iterkeys(): try: if 'system_slot' in i.split(':'):", "None kv = None ru = None 
width = None", "rack_name): self.systems = [] self.rack_name = rack_name self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name)", "None width = None systems = [] ethernet_patch_panel_24 = []", "system_image = self._get_system_image(tree) system_slot = self._get_system_slot(tree) self.systems.append({ \"system_name\":s.system.hostname, \"system_id\":s.system.id, \"system_ru\":system_ru,", "= KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" % (self.rack_name)) self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 24) self.ethernet_patch_panel_48", "import KeyValue as KeyValue from django.test.client import RequestFactory from api_v2.keyvalue_handler", "port_count): ret = [] for i in tree.keyvalue_set.all(): match_string =", "= h.read(request) system_ru = self._get_system_ru(tree) system_image = self._get_system_image(tree) system_slot =", "for i in tree.iterkeys(): try: if 'system_ru' in i.split(':'): return", "KeyValue from django.test.client import RequestFactory from api_v2.keyvalue_handler import KeyValueHandler import", "i in tree.iterkeys(): try: if 'system_ru' in i.split(':'): return tree[i]", "= self._get_ethernet_patch_panels(self.kv, 'ethernet', 24) self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 48) import", "def _get_system_ru(self, tree): for i in tree.iterkeys(): try: if 'system_ru'", "TruthKeyValue, Truth from systems.models import KeyValue as KeyValue from django.test.client", "except: pass return 4 def _get_system_image(self, tree): for i in", "(s.system.hostname), follow=True) tree = h.read(request) system_ru = self._get_system_ru(tree) system_image =", "= [] self.rack_name = rack_name self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name) self.system_list =", "import RequestFactory from api_v2.keyvalue_handler import KeyValueHandler import json factory =", "def _get_ethernet_patch_panels(self, tree, 
type, port_count): ret = [] for i", "from api_v2.keyvalue_handler import KeyValueHandler import json factory = RequestFactory() class", "if 'system_slot' in i.split(':'): return tree[i] except: pass return 1", "KeyValueHandler import json factory = RequestFactory() class Rack: rack_name =", "if 'system_image' in i.split(':'): return tree[i] except: pass return None", "tree.iterkeys(): try: if 'system_slot' in i.split(':'): return tree[i] except: pass", "Rack: rack_name = None tree = None kv = None", "as KeyValue from django.test.client import RequestFactory from api_v2.keyvalue_handler import KeyValueHandler", "api_v2.keyvalue_handler import KeyValueHandler import json factory = RequestFactory() class Rack:", "i in tree.iterkeys(): try: if 'system_image' in i.split(':'): return tree[i]", "rack_name = None tree = None kv = None ru", "from truth.models import KeyValue as TruthKeyValue, Truth from systems.models import", "return ret def _get_system_ru(self, tree): for i in tree.iterkeys(): try:", "except: pass return None def _get_system_slot(self, tree): for i in", "tree[i] except: pass return None def _get_system_slot(self, tree): for i", "self.systems.append({ \"system_name\":s.system.hostname, \"system_id\":s.system.id, \"system_ru\":system_ru, \"system_image\":system_image, 'system_slot':system_slot, 'operating_system':str(s.system.operating_system), 'server_model': str(s.system.server_model), 'oob_ip':", "in tree.iterkeys(): try: if 'system_ru' in i.split(':'): return tree[i] except:", "[] ethernet_patch_panel_48 = [] def __init__(self, rack_name): self.systems = []", "'operating_system':str(s.system.operating_system), 'server_model': str(s.system.server_model), 'oob_ip': str(s.system.oob_ip), }) self.systems = sorted(self.systems, key=lambda", "24) self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 48) import pdb h =", "= self._get_system_image(tree) system_slot = self._get_system_slot(tree) self.systems.append({ 
\"system_name\":s.system.hostname, \"system_id\":s.system.id, \"system_ru\":system_ru, \"system_image\":system_image,", "_get_ethernet_patch_panels(self, tree, type, port_count): ret = [] for i in", "None tree = None kv = None ru = None", "KeyValue as TruthKeyValue, Truth from systems.models import KeyValue as KeyValue", "ru = None width = None systems = [] ethernet_patch_panel_24", "None systems = [] ethernet_patch_panel_24 = [] ethernet_patch_panel_48 = []", "import KeyValueTree from truth.models import KeyValue as TruthKeyValue, Truth from", "import pdb h = KeyValueHandler() for s in self.system_list: request", "system_ru = self._get_system_ru(tree) system_image = self._get_system_image(tree) system_slot = self._get_system_slot(tree) self.systems.append({", "self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name) self.system_list = KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" % (self.rack_name)) self.ethernet_patch_panel_24 =", "[] def __init__(self, rack_name): self.systems = [] self.rack_name = rack_name", "def __init__(self, rack_name): self.systems = [] self.rack_name = rack_name self.kv", "= self._get_ethernet_patch_panels(self.kv, 'ethernet', 48) import pdb h = KeyValueHandler() for", "i in tree.keyvalue_set.all(): match_string = \"%i_port_%s_patch_panel\" % (port_count, type) if", "% (self.rack_name)) self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 24) self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv,", "KeyValueHandler() for s in self.system_list: request = factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname),", "pass return 4 def _get_system_image(self, tree): for i in tree.iterkeys():", "self.systems = [] self.rack_name = rack_name self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name) self.system_list", "def _get_system_slot(self, tree): for i in tree.iterkeys(): try: if 'system_slot'", 
"match_string = \"%i_port_%s_patch_panel\" % (port_count, type) if str(i.key) == match_string:", "tree.iterkeys(): try: if 'system_ru' in i.split(':'): return tree[i] except: pass", "= 42 try: self.width = self.kv.keyvalue_set.get(key='rack_width').value except: self.width = 30", "48) import pdb h = KeyValueHandler() for s in self.system_list:", "str(i.key) == match_string: ret.append(i.value) return ret def _get_system_ru(self, tree): for", "try: if 'system_ru' in i.split(':'): return tree[i] except: pass return", "self.rack_name = rack_name self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name) self.system_list = KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" %", "self._get_system_slot(tree) self.systems.append({ \"system_name\":s.system.hostname, \"system_id\":s.system.id, \"system_ru\":system_ru, \"system_image\":system_image, 'system_slot':system_slot, 'operating_system':str(s.system.operating_system), 'server_model': str(s.system.server_model),", "RequestFactory() class Rack: rack_name = None tree = None kv", "= 30 def _get_ethernet_patch_panels(self, tree, type, port_count): ret = []", "request = factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname), follow=True) tree = h.read(request) system_ru", "= sorted(self.systems, key=lambda k: k['system_slot']) try: self.ru = self.kv.keyvalue_set.get(key='rack_ru').value except:", "try: self.ru = self.kv.keyvalue_set.get(key='rack_ru').value except: self.ru = 42 try: self.width", "from systems.models import KeyValue as KeyValue from django.test.client import RequestFactory", "except: self.width = 30 def _get_ethernet_patch_panels(self, tree, type, port_count): ret", "return tree[i] except: pass return None def _get_system_slot(self, tree): for", "if 'system_ru' in i.split(':'): return tree[i] except: pass return 4", "'oob_ip': str(s.system.oob_ip), }) self.systems = sorted(self.systems, key=lambda k: k['system_slot']) try:", "import json factory = 
RequestFactory() class Rack: rack_name = None", "factory = RequestFactory() class Rack: rack_name = None tree =", "= [] for i in tree.keyvalue_set.all(): match_string = \"%i_port_%s_patch_panel\" %", "try: self.width = self.kv.keyvalue_set.get(key='rack_width').value except: self.width = 30 def _get_ethernet_patch_panels(self,", "RequestFactory from api_v2.keyvalue_handler import KeyValueHandler import json factory = RequestFactory()", "None ru = None width = None systems = []", "width = None systems = [] ethernet_patch_panel_24 = [] ethernet_patch_panel_48", "in self.system_list: request = factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname), follow=True) tree =", "match_string: ret.append(i.value) return ret def _get_system_ru(self, tree): for i in", "pass return None def _get_system_slot(self, tree): for i in tree.iterkeys():", "self.system_list = KeyValue.objects.select_related('system').filter(value__contains=\"truth:%s\" % (self.rack_name)) self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 24)", "\"system_ru\":system_ru, \"system_image\":system_image, 'system_slot':system_slot, 'operating_system':str(s.system.operating_system), 'server_model': str(s.system.server_model), 'oob_ip': str(s.system.oob_ip), }) self.systems", "as TruthKeyValue, Truth from systems.models import KeyValue as KeyValue from", "def _get_system_image(self, tree): for i in tree.iterkeys(): try: if 'system_image'", "return 4 def _get_system_image(self, tree): for i in tree.iterkeys(): try:" ]
[ "from .sort import sort function_map = { 'trim': trim, 'sample':", ".sort import sort function_map = { 'trim': trim, 'sample': sample,", "import sort function_map = { 'trim': trim, 'sample': sample, 'sort':", "import trim from .sample import sample from .sort import sort", "from .sample import sample from .sort import sort function_map =", "function_map = { 'trim': trim, 'sample': sample, 'sort': sort }", "sort function_map = { 'trim': trim, 'sample': sample, 'sort': sort", "trim from .sample import sample from .sort import sort function_map", "from .trim import trim from .sample import sample from .sort", "import sample from .sort import sort function_map = { 'trim':", ".sample import sample from .sort import sort function_map = {", ".trim import trim from .sample import sample from .sort import", "sample from .sort import sort function_map = { 'trim': trim," ]
[]
[ "= self.generate(\"<nverb>\", 1).strip() # else: # v = self.generate(\"<verb>\", 1).strip()", "= newPoem.replace(\"newline ,\", \", newline\") newPoem = newPoem.replace(\"newline\", \"\\n\") newPoem", "= self.generate(\"<padj>\",1) else: if random.randint(1, 100) < THEME_PROB: v =", "\\n \\n\", \"\\n\\n\") newPoem = newPoem.replace(\"\\n \\n \", \"\\n\\n\") newPoem", "else: newPoem2 = newPoem2 + \" <br />\\n\" newPoem2 =", "for word in poem2: if \"newline\" in word: breaks +=", "= False else: if breaks > 1: capitalize = True", "in word: v = self.generate(\"<nnoun>\", 1).strip() else: v = self.generate(\"<noun>\",", "print(\"DOWNLOD NODECUBE\") print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip unzip linguistics.zip\"\"\") VERSION = \"1.1\" THEME_PROB", "capitalize = True if capitalize == True and \"newline\" not", "= self.generate(\"<verb>\", 1).strip() if random.randint(1, 100) < THEME_PROB: v =", "= self.generate(\"<pnoun>\", 1).strip() elif \"nnoun\" in word: v = self.generate(\"<nnoun>\",", "poem = self.generate(key, 1) poem = poem.replace(\" ,\", \",\") puncuation", "\" \" elif \"person\" in word: v = self.generate(\"<person>\", 1).strip()", "an a\") newPoem = newPoem.replace(\"newline .\", \". 
newline\") newPoem =", "\",\", \"your\", \"by\", \"like\", \"to\", \"you\", \"your\", \"a\", \"are\", \"become\",", "'poem' if 'mushy' in sys.argv[1:]: poemtype = 'mushypoem' p,seed_str=generate_poem(poemtype) print((\"*\"*30", "= title.replace(\".\", \"\") newPoem = newPoem.replace(title, \"<h1>\" + newTitle +", "en.noun.plural(v) string = string + v + \" \" elif", "in list(set(puncuation)): if punc in word: capitalize = True poem3.append(word)", "< 0 and \"newline\" not in word: isgood = True", "\" \".join(poem3) newPoem = newPoem.replace(\" a a\", \" an a\")", "in gram[i]: string = gram[i] else: for word in gram[i].split():", "in word: v = en.noun.plural(v) string = string + self.generate(word,", "in list(set(puncuation)): newPoem = newPoem.replace(\" \" + punc, punc) for", "\"verb\" in word and word != '<adverb>': if \"pverb\" in", "newPoem = newPoem.replace(\" \" + punc, punc) newPoem = newPoem.replace(\"", "v + \" \" elif \"adj\" in word: if \"mushy\"", "newPoem = newPoem.replace(\"newline !\", \"! 
newline\") newPoem = newPoem.replace(\"newline ,\",", "\" + punc, punc) for punc in list(set(puncuation)): newPoem =", "in word: isgood = True for dontbreak in list(dontbreaks +", "string = string + self.generate(word, 1) + \" \" else:", "random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-verb>\", 1).strip() if \"verb-inf\"", "+ '>',hex_seed) return p,seed_str if __name__ == '__main__': poemtype =", "newPoem.replace(\" ,\", \",\") newPoem = newPoem.replace(\"?.\", \"?\") newPoem = newPoem.replace(\".?\",", "in word: # noPunc = False # if noPunc: #", "= False for punc in list(set(puncuation)): if punc in word:", "newPoem.replace(\"?.\", \"?\") newPoem = newPoem.replace(\".?\", \".\") newPoem = newPoem.replace(\",.\", \",\")", "\"i'\" in word: word = word.capitalize() poem3.append(word) capitalize = False", "string + word + \" \" else: if \"verb\" in", "0 if beforeFirstBreak or word == \"i\" or \"i'\" in", "newPoem = newPoem.replace(\"newline\", \"\\n\") newPoem = newPoem.replace(\" \\n \\n\", \"\\n\\n\")", "\"<\" not in word: string = string + word +", "in word: string = string + \\ en.verb.present( v, person=3,", "= newPoem.replace(title, \"<h1>\" + newTitle + \"</h1>\") newPoem2 = \"\"", "v = self.generate(\"<theme-noun>\", 1).strip() if \"pl\" in word: v =", "self.generate(word, 1) + \" \" else: if \"-pl\" in word:", "\" elif \"noun\" in word: if \"pnoun\" in word or", "class bnfDictionary: def __init__(self, file): self.grammar = yaml.load(open(file,'r')) self.poemtype =", "\"behind\", \"the\", \"when\", \"what\", \"why\", \"who\", \",\", \"your\", \"by\", \"like\",", "punc in word: # noPunc = False # if noPunc:", "= string + self.generate(word, 1) + \" \" elif \"person\"", "isgood = True for dontbreak in list(dontbreaks + puncuation): if", "== \"i\" or \"i'\" in word: word = word.capitalize() poem3.append(word)", "else: if breaks > 1: capitalize = True if capitalize", "+ word + \" \" else: if \"verb\" in word", "+ \" \" elif \"adj\" in word: if 
\"mushy\" in", "= 0 poem2 = [] foundFirstBreak = False for word", "word and foundFirstBreak: isgood = True for dontbreak in list(dontbreaks", "# noPunc = True # for punc in list(set(puncuation)): #", "1).strip() elif \"nverb\" in word: v = self.generate(\"<nverb>\", 1).strip() #", "len(gram) - 1) string = \"\" if \"<\" not in", "re try: import en except: print(\"DOWNLOD NODECUBE\") print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip unzip", "else: v = self.generate(\"<noun>\", 1).strip() if random.randint(1, 100) < THEME_PROB:", "v = en.noun.plural(v) string = string + self.generate(word, 1) +", "v = self.generate(\"<nnoun>\", 1).strip() else: v = self.generate(\"<noun>\", 1).strip() if", "def generatePretty(self, key, seed_str): if seed_str == None: seed_str =", "firstLine == False: firstLine = True newPoem2 = newPoem2 +", "\"verb-pr\" in word: string = string + \\ en.verb.present( v,", "= 0 if beforeFirstBreak or word == \"i\" or \"i'\"", "newPoem = newPoem.replace(\"\\n \\n \", \"\\n\\n\") newPoem = newPoem.replace(\" '\",", "num): gram = self.grammar[key] if len(gram)==1: i = 0 else:", "+ \"\\n\"*5)) filtered = [] for line in re.sub(\"<.*?>\", \"", "in word: string = string + \\ en.verb.present_participle(v) + \"", "string + en.verb.past(v) + \" \" else: string = string", "= self.generate(word, 1).strip() string = string + v + \"", "in word: v = en.noun.plural(v) string = string + v", "and not secondLine: newPoem2 = newPoem2 + \"<p>\\n\" secondLine =", "import datetime import os import random import sys import uuid", "+ v + \" \" elif \"person\" in word: v", "word: v = en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else: v = self.generate(word, 1) string", "\"like\", \"to\", \"you\", \"your\", \"a\", \"are\", \"become\", \"newline\"] capitalize =", "word: foundFirstBreak = True poem3 = [] beforeFirstBreak = True", "line + \" \\n\" if firstLine and secondLine: newPoem2 =", "+ \" \" elif \"person\" in word: v = 
self.generate(\"<person>\",", "foundFirstBreak = False for word in poem.replace(\"\\n\", \"newline\").split(): poem2.append(word.lower()) if", "import base64 import yaml import re try: import en except:", "if key == \"<mushypoem>\": key = \"<poem>\" poem = self.generate(key,", "< THEME_PROB: v = self.generate(\"<theme-noun>\", 1).strip() if \"pl\" in word:", "self.generate(\"<padj>\",1) else: if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-adj>\",", "string = string + v + \" \" elif \"person\"", "+ \"</h1>\") newPoem2 = \"\" firstLine = False secondLine =", "newPoem = newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\") title =", "secondLine = False for line in newPoem.split(\"\\n\"): if len(line) >", "for punc in list(set(puncuation)): newPoem = newPoem.replace(\" \" + punc,", "word: v = self.generate(\"<nverb>\", 1).strip() # else: # v =", "seed_str): if seed_str == None: seed_str = str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool", "\", p).split(\"\\n\"): if len(line.strip()) > 0: filtered.append(line.strip()) else: filtered.append(\"pause\") print(p)", "True # noPunc = True # for punc in list(set(puncuation)):", "word: string = string + word + \" \" else:", "newline\") newPoem = newPoem.replace(\"newline\", \"\\n\") newPoem = newPoem.replace(\" \\n \\n\",", "capitalize = True poem3.append(word) if random.randint(1, 100) < 0 and", "'\", \"'\") for punc in list(set(puncuation)): newPoem = newPoem.replace(\" \"", "\"1.1\" THEME_PROB = 0 class bnfDictionary: def __init__(self, file): self.grammar", "\" \", p).split(\"\\n\"): if len(line.strip()) > 0: filtered.append(line.strip()) else: filtered.append(\"pause\")", "\"newline\" not in word: word = word.capitalize() capitalize = False", "\"?\"] dontbreaks = [\"of\", \"behind\", \"the\", \"when\", \"what\", \"why\", \"who\",", "print((\"*\"*30 + \"\\n\"*5)) filtered = [] for line in re.sub(\"<.*?>\",", "\"newline\" in word: breaks += 
1 beforeFirstBreak = False else:", "# v = self.generate(\"<verb>\", 1).strip() if random.randint(1, 100) < THEME_PROB:", "= newPoem.replace(\"newline ?\", \"? newline\") newPoem = newPoem.replace(\"newline !\", \"!", "= [] foundFirstBreak = False for word in poem.replace(\"\\n\", \"newline\").split():", "not in word: isgood = True for dontbreak in list(dontbreaks", "v = self.generate(\"<person>\", 1).strip() if \"pl\" in word: v =", "\"newline\"] capitalize = False breaks = 0 poem2 = []", "string = string + en.verb.past(v) + \" \" else: string", "= newPoem2 + \" <br />\\n\" newPoem2 = newPoem2 +", "v + \" \" return string def generatePretty(self, key, seed_str):", "secondLine = True if firstLine == False: firstLine = True", "newPoem = newPoem.replace(\"newline ,\", \", newline\") newPoem = newPoem.replace(\"newline\", \"\\n\")", "\"by\", \"like\", \"to\", \"you\", \"your\", \"a\", \"are\", \"become\", \"newline\"] capitalize", "= word.capitalize() capitalize = False for punc in list(set(puncuation)): if", "\" \" elif \"verb-past\" in word: string = string +", "firstLine = False secondLine = False for line in newPoem.split(\"\\n\"):", "filtered = [] for line in re.sub(\"<.*?>\", \" \", p).split(\"\\n\"):", "\".\", \"!\", \"?\"] dontbreaks = [\"of\", \"behind\", \"the\", \"when\", \"what\",", "poem3 = [] beforeFirstBreak = True for word in poem2:", "<br />\\n\" newPoem2 = newPoem2 + \"</p>\" return newPoem2,seed_str bnf", "+ en.verb.past(v) + \" \" else: string = string +", "if \"pl\" in word: v = en.noun.plural(v) string = string", "\"your\", \"by\", \"like\", \"to\", \"you\", \"your\", \"a\", \"are\", \"become\", \"newline\"]", "__future__ import absolute_import from __future__ import print_function import datetime import", "+ punc, punc) for punc in list(set(puncuation)): newPoem = newPoem.replace(\"", "= string + \\ en.verb.present_participle(v) + \" \" elif \"verb-pr\"", "[] beforeFirstBreak = True for word in poem2: if \"newline\"", "\" else: if \"verb\" in 
word and word != '<adverb>':", "= 0 else: i = random.randint(0, len(gram) - 1) string", "= self.generate(key, 1) poem = poem.replace(\" ,\", \",\") puncuation =", "bnf.generatePretty('<' + poemtype + '>',hex_seed) return p,seed_str if __name__ ==", "100) < 2 and \"newline\" not in word and foundFirstBreak:", "== False: firstLine = True newPoem2 = newPoem2 + line", "+ self.generate(word, 1) + \" \" else: if \"-pl\" in", "string + v + \" \" elif \"fruit\" in word:", "= newPoem.replace(\" a a\", \" an a\") newPoem = newPoem.replace(\"newline", "= newPoem.replace(\"..\", \".\") title = newPoem.split(\"\\n\")[0] newTitle = title.replace(\".\", \"\")", "True if firstLine == False: firstLine = True newPoem2 =", "os import random import sys import uuid import base64 import", "in word: if \"mushy\" in self.poemtype: v = self.generate(\"<padj>\",1) else:", "= [\"of\", \"behind\", \"the\", \"when\", \"what\", \"why\", \"who\", \",\", \"your\",", "= \" \".join(poem3) newPoem = newPoem.replace(\" a a\", \" an", "if __name__ == '__main__': poemtype = 'poem' if 'mushy' in", "\"noun\" in word: if \"pnoun\" in word or \"mushy\" in", "1).strip() # else: # v = self.generate(\"<verb>\", 1).strip() if random.randint(1,", "foundFirstBreak: isgood = True for dontbreak in list(dontbreaks + puncuation):", "\"pverb\" in word or \"mushy\" in self.poemtype: v = self.generate(\"<pverb>\",", "from __future__ import absolute_import from __future__ import print_function import datetime", "seed_str == None: seed_str = str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool = language_check.LanguageTool('en-US')", "word == \"i\" or \"i'\" in word: word = word.capitalize()", "in self.poemtype: v = self.generate(\"<padj>\",1) else: if random.randint(1, 100) <", "= language_check.LanguageTool('en-US') self.poemtype = key if key == \"<mushypoem>\": key", "elif \"fruit\" in word: v = self.generate(\"<fruit>\", 1).strip() if \"pl\"", "word.capitalize() capitalize 
= False for punc in list(set(puncuation)): if punc", "\"pl\" in word: v = en.noun.plural(v) string = string +", "True if capitalize == True and \"newline\" not in word:", "word and word != '<adverb>': if \"pverb\" in word or", "word in poem2: if \"newline\" in word: breaks += 1", "hex_seed=None): p,seed_str = bnf.generatePretty('<' + poemtype + '>',hex_seed) return p,seed_str", "if \"verb-inf\" in word: string = string + \\ en.verb.present_participle(v)", "+ \" \" elif \"fruit\" in word: v = self.generate(\"<fruit>\",", "v = self.generate(word, 1).strip() string = string + v +", "generate_poem(poemtype, hex_seed=None): p,seed_str = bnf.generatePretty('<' + poemtype + '>',hex_seed) return", "newPoem.replace(\" a a\", \" an a\") newPoem = newPoem.replace(\"newline .\",", "False for punc in list(set(puncuation)): if punc in word: capitalize", "return p,seed_str if __name__ == '__main__': poemtype = 'poem' if", "+ \"</p>\" return newPoem2,seed_str bnf = bnfDictionary('brain.yaml') def generate_poem(poemtype, hex_seed=None):", "= 0 class bnfDictionary: def __init__(self, file): self.grammar = yaml.load(open(file,'r'))", "line + \" <br />\\n\" else: newPoem2 = newPoem2 +", "in newPoem.split(\"\\n\"): if len(line) > 0: if firstLine and not", "i = 0 else: i = random.randint(0, len(gram) - 1)", "def generate_poem(poemtype, hex_seed=None): p,seed_str = bnf.generatePretty('<' + poemtype + '>',hex_seed)", "= newPoem.replace(\" \" + punc, punc) for punc in list(set(puncuation)):", "= newPoem.replace(\" '\", \"'\") for punc in list(set(puncuation)): newPoem =", "\"become\", \"newline\"] capitalize = False breaks = 0 poem2 =", "newPoem.replace(\" \" + punc, punc) newPoem = newPoem.replace(\" ,\", \",\")", "else: if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-adj>\", 1).strip()", "word: # noPunc = False # if noPunc: # poem3.append(random.choice(puncuation))", "in word: v = en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else: v = self.generate(word, 
1)", "import yaml import re try: import en except: print(\"DOWNLOD NODECUBE\")", "else: if \"-pl\" in word: v = en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else: v", "string + \\ en.verb.present_participle(v) + \" \" elif \"verb-pr\" in", "= newPoem.replace(\"!.\", \"!\") newPoem = newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\",", "= en.noun.plural(v) string = string + self.generate(word, 1) + \"", "newline\") newPoem = newPoem.replace(\"newline !\", \"! newline\") newPoem = newPoem.replace(\"newline", "newTitle + \"</h1>\") newPoem2 = \"\" firstLine = False secondLine", "= bnf.generatePretty('<' + poemtype + '>',hex_seed) return p,seed_str if __name__", "poem3.append(random.choice(puncuation)) newPoem = \" \".join(poem3) newPoem = newPoem.replace(\" a a\",", "if \"pnoun\" in word or \"mushy\" in self.poemtype: v =", "capitalize = False else: if breaks > 1: capitalize =", "\"verb-past\" in word: string = string + en.verb.past(v) + \"", "\"nverb\" in word: v = self.generate(\"<nverb>\", 1).strip() # else: #", "\"\") newPoem = newPoem.replace(title, \"<h1>\" + newTitle + \"</h1>\") newPoem2", "random.randint(1, 100) < 2 and \"newline\" not in word and", "import print_function import datetime import os import random import sys", "\" <br />\\n\" else: newPoem2 = newPoem2 + \" <br", "\\n \", \"\\n\\n\") newPoem = newPoem.replace(\" '\", \"'\") for punc", "= newPoem.replace(\".?\", \".\") newPoem = newPoem.replace(\",.\", \",\") newPoem = newPoem.replace(\"!.\",", "firstLine and not secondLine: newPoem2 = newPoem2 + \"<p>\\n\" secondLine", "self.generate(\"<nverb>\", 1).strip() # else: # v = self.generate(\"<verb>\", 1).strip() if", "> 0: if firstLine and not secondLine: newPoem2 = newPoem2", "= newPoem.replace(\" ,\", \",\") newPoem = newPoem.replace(\"?.\", \"?\") newPoem =", "\" elif \"person\" in word: v = self.generate(\"<fruit>\", 1).strip() if", "= self.generate(\"<nnoun>\", 1).strip() else: v = self.generate(\"<noun>\", 1).strip() 
if random.randint(1,", "word or \"mushy\" in self.poemtype: v = self.generate(\"<pverb>\", 1).strip() elif", "+ puncuation): if dontbreak == word.lower(): isgood = False if", "word or \"mushy\" in self.poemtype: v = self.generate(\"<pnoun>\", 1).strip() elif", "elif \"verb-pr\" in word: string = string + \\ en.verb.present(", "if \"<\" not in word: string = string + word", "breaks > 1: capitalize = True if capitalize == True", "0 class bnfDictionary: def __init__(self, file): self.grammar = yaml.load(open(file,'r')) self.poemtype", "newPoem2 = newPoem2 + line + \" \\n\" if firstLine", "[\".\", \".\", \".\", \".\", \"!\", \"?\"] dontbreaks = [\"of\", \"behind\",", "newTitle = title.replace(\".\", \"\") newPoem = newPoem.replace(title, \"<h1>\" + newTitle", "\"\" if \"<\" not in gram[i]: string = gram[i] else:", "from __future__ import print_function import datetime import os import random", "/>\\n\" else: newPoem2 = newPoem2 + \" <br />\\n\" newPoem2", "\" elif \"fruit\" in word: v = self.generate(\"<fruit>\", 1).strip() if", "\".\") newPoem = newPoem.replace(\"..\", \".\") title = newPoem.split(\"\\n\")[0] newTitle =", "in sys.argv[1:]: poemtype = 'mushypoem' p,seed_str=generate_poem(poemtype) print((\"*\"*30 + \"\\n\"*5)) filtered", "newPoem.split(\"\\n\")[0] newTitle = title.replace(\".\", \"\") newPoem = newPoem.replace(title, \"<h1>\" +", "\\ en.verb.present_participle(v) + \" \" elif \"verb-pr\" in word: string", "\"\\n\\n\") newPoem = newPoem.replace(\" '\", \"'\") for punc in list(set(puncuation)):", "<br />\\n\" else: newPoem2 = newPoem2 + \" <br />\\n\"", "= self.generate(\"<theme-adj>\", 1).strip() else: v = self.generate(word, 1).strip() string =", "= 'poem' if 'mushy' in sys.argv[1:]: poemtype = 'mushypoem' p,seed_str=generate_poem(poemtype)", "bnf = bnfDictionary('brain.yaml') def generate_poem(poemtype, hex_seed=None): p,seed_str = bnf.generatePretty('<' +", "= True poem3.append(word) if random.randint(1, 100) < 0 and \"newline\"", "!= '<adverb>': if 
\"pverb\" in word or \"mushy\" in self.poemtype:", "= self.generate(\"<theme-verb>\", 1).strip() if \"verb-inf\" in word: string = string", "person=3, negate=False) + \" \" elif \"verb-past\" in word: string", "\"'\") for punc in list(set(puncuation)): newPoem = newPoem.replace(\" \" +", "= False for line in newPoem.split(\"\\n\"): if len(line) > 0:", "newPoem = newPoem.replace(\" a a\", \" an a\") newPoem =", "False else: if breaks > 1: capitalize = True if", "punc in list(set(puncuation)): if punc in word: capitalize = True", "sys import uuid import base64 import yaml import re try:", "= en.noun.plural(v) string = string + v + \" \"", "100) < THEME_PROB: v = self.generate(\"<theme-verb>\", 1).strip() if \"verb-inf\" in", "\", \"\\n\\n\") newPoem = newPoem.replace(\" '\", \"'\") for punc in", "foundFirstBreak = True poem3 = [] beforeFirstBreak = True for", "= \"<poem>\" def generate(self, key, num): gram = self.grammar[key] if", "VERSION = \"1.1\" THEME_PROB = 0 class bnfDictionary: def __init__(self,", "\" \" elif \"noun\" in word: if \"pnoun\" in word", "in word and foundFirstBreak: isgood = True for dontbreak in", "for punc in list(set(puncuation)): if punc in word: capitalize =", "= self.generate(\"<person>\", 1).strip() if \"pl\" in word: v = en.noun.plural(v)", "= True newPoem2 = newPoem2 + line + \" \\n\"", "= True # noPunc = True # for punc in", "# noPunc = False # if noPunc: # poem3.append(random.choice(puncuation)) newPoem", "for dontbreak in list(dontbreaks + puncuation): if dontbreak == word.lower():", "poem2 = [] foundFirstBreak = False for word in poem.replace(\"\\n\",", "= True poem3 = [] beforeFirstBreak = True for word", "= [] beforeFirstBreak = True for word in poem2: if", "v + \" \" elif \"fruit\" in word: v =", "string + v + \" \" elif \"noun\" in word:", "poem3.append(word) capitalize = False else: if breaks > 1: capitalize", "True for word in poem2: if \"newline\" in word: breaks", "isgood = False if isgood: poem2.append(\"newline\") if 
\"newline\" in word:", "+ v + \" \" return string def generatePretty(self, key,", "== word.lower(): isgood = False if isgood: poem3.append(random.choice(puncuation)) capitalize =", "key, num): gram = self.grammar[key] if len(gram)==1: i = 0", "\" \" elif \"fruit\" in word: v = self.generate(\"<fruit>\", 1).strip()", "?\", \"? newline\") newPoem = newPoem.replace(\"newline !\", \"! newline\") newPoem", "self.generate(key, 1) poem = poem.replace(\" ,\", \",\") puncuation = [\".\",", "v = self.generate(\"<padj>\",1) else: if random.randint(1, 100) < THEME_PROB: v", "1) poem = poem.replace(\" ,\", \",\") puncuation = [\".\", \".\",", "not in word and foundFirstBreak: isgood = True for dontbreak", "= True # for punc in list(set(puncuation)): # if punc", "re.sub(\"<.*?>\", \" \", p).split(\"\\n\"): if len(line.strip()) > 0: filtered.append(line.strip()) else:", "newPoem.replace(\" \\n \\n\", \"\\n\\n\") newPoem = newPoem.replace(\"\\n \\n \", \"\\n\\n\")", "THEME_PROB: v = self.generate(\"<theme-adj>\", 1).strip() else: v = self.generate(word, 1).strip()", "= string + v + \" \" elif \"adj\" in", "\".\") newPoem = newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\") title", "= newPoem.replace(\" \\n \\n\", \"\\n\\n\") newPoem = newPoem.replace(\"\\n \\n \",", "# for punc in list(set(puncuation)): # if punc in word:", "== \"<mushypoem>\": key = \"<poem>\" poem = self.generate(key, 1) poem", "None: seed_str = str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool = language_check.LanguageTool('en-US') self.poemtype =", "+ \" \\n\" if firstLine and secondLine: newPoem2 = newPoem2", "line in re.sub(\"<.*?>\", \" \", p).split(\"\\n\"): if len(line.strip()) > 0:", "in word: word = word.capitalize() poem3.append(word) capitalize = False else:", "\"your\", \"a\", \"are\", \"become\", \"newline\"] capitalize = False breaks =", "+ \\ en.verb.present_participle(v) + \" \" elif \"verb-pr\" in word:", "punc, punc) newPoem = 
newPoem.replace(\" ,\", \",\") newPoem = newPoem.replace(\"?.\",", "a\") newPoem = newPoem.replace(\"newline .\", \". newline\") newPoem = newPoem.replace(\"newline", "if firstLine and secondLine: newPoem2 = newPoem2 + line +", "self.grammar = yaml.load(open(file,'r')) self.poemtype = \"<poem>\" def generate(self, key, num):", "random.randint(1, 100) < 0 and \"newline\" not in word: isgood", "= newPoem.replace(\",.\", \",\") newPoem = newPoem.replace(\"!.\", \"!\") newPoem = newPoem.replace(\"..\",", "v = en.noun.plural(v) string = string + v + \"", "self.generate(\"<nnoun>\", 1).strip() else: v = self.generate(\"<noun>\", 1).strip() if random.randint(1, 100)", "isgood = False if isgood: poem3.append(random.choice(puncuation)) capitalize = True #", "newPoem2 + \"</p>\" return newPoem2,seed_str bnf = bnfDictionary('brain.yaml') def generate_poem(poemtype,", "word in gram[i].split(): if \"<\" not in word: string =", "beforeFirstBreak = True for word in poem2: if \"newline\" in", "en except: print(\"DOWNLOD NODECUBE\") print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip unzip linguistics.zip\"\"\") VERSION =", "len(gram)==1: i = 0 else: i = random.randint(0, len(gram) -", "False breaks = 0 poem2 = [] foundFirstBreak = False", "en.verb.past(v) + \" \" else: string = string + v", "\"\\n\"*5)) filtered = [] for line in re.sub(\"<.*?>\", \" \",", "in word: string = string + en.verb.past(v) + \" \"", "= string + self.generate(word, 1) + \" \" else: if", "dontbreak == word.lower(): isgood = False if isgood: poem3.append(random.choice(puncuation)) capitalize", "not in word: string = string + word + \"", "if random.randint(1, 100) < 0 and \"newline\" not in word:", "line in newPoem.split(\"\\n\"): if len(line) > 0: if firstLine and", "\",\") newPoem = newPoem.replace(\"!.\", \"!\") newPoem = newPoem.replace(\"..\", \".\") newPoem", "poem3.append(random.choice(puncuation)) capitalize = True # noPunc = True # for", "in self.poemtype: v = 
self.generate(\"<pnoun>\", 1).strip() elif \"nnoun\" in word:", "= \"<poem>\" poem = self.generate(key, 1) poem = poem.replace(\" ,\",", "1) string = \"\" if \"<\" not in gram[i]: string", "\" elif \"verb-pr\" in word: string = string + \\", "poem.replace(\" ,\", \",\") puncuation = [\".\", \".\", \".\", \".\", \"!\",", "word: v = en.noun.plural(v) string = string + self.generate(word, 1)", "+ poemtype + '>',hex_seed) return p,seed_str if __name__ == '__main__':", "= str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool = language_check.LanguageTool('en-US') self.poemtype = key if", "puncuation = [\".\", \".\", \".\", \".\", \"!\", \"?\"] dontbreaks =", "\" \" elif \"adj\" in word: if \"mushy\" in self.poemtype:", "= True for dontbreak in list(dontbreaks + puncuation): if dontbreak", "True poem3.append(word) if random.randint(1, 100) < 0 and \"newline\" not", "elif \"adj\" in word: if \"mushy\" in self.poemtype: v =", "else: for word in gram[i].split(): if \"<\" not in word:", "newPoem = \" \".join(poem3) newPoem = newPoem.replace(\" a a\", \"", "\"</p>\" return newPoem2,seed_str bnf = bnfDictionary('brain.yaml') def generate_poem(poemtype, hex_seed=None): p,seed_str", "+ \" \" elif \"verb-pr\" in word: string = string", "newPoem.replace(\"newline ?\", \"? newline\") newPoem = newPoem.replace(\"newline !\", \"! 
newline\")", "100) < THEME_PROB: v = self.generate(\"<theme-noun>\", 1).strip() if \"pl\" in", "= string + v + \" \" elif \"noun\" in", "1 beforeFirstBreak = False else: breaks = 0 if beforeFirstBreak", "\" \" return string def generatePretty(self, key, seed_str): if seed_str", "False # if noPunc: # poem3.append(random.choice(puncuation)) newPoem = \" \".join(poem3)", "yaml.load(open(file,'r')) self.poemtype = \"<poem>\" def generate(self, key, num): gram =", "capitalize = False for punc in list(set(puncuation)): if punc in", "word: string = string + \\ en.verb.present_participle(v) + \" \"", "newline\") newPoem = newPoem.replace(\"newline ?\", \"? newline\") newPoem = newPoem.replace(\"newline", "self.poemtype: v = self.generate(\"<padj>\",1) else: if random.randint(1, 100) < THEME_PROB:", "absolute_import from __future__ import print_function import datetime import os import", "\\ en.verb.present( v, person=3, negate=False) + \" \" elif \"verb-past\"", "key, seed_str): if seed_str == None: seed_str = str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int)", "punc) newPoem = newPoem.replace(\" ,\", \",\") newPoem = newPoem.replace(\"?.\", \"?\")", "\" \" else: if \"-pl\" in word: v = en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1))", "newPoem.replace(\" \" + punc, punc) for punc in list(set(puncuation)): newPoem", "= newPoem.replace(\"newline !\", \"! 
newline\") newPoem = newPoem.replace(\"newline ,\", \",", "\" + punc, punc) newPoem = newPoem.replace(\" ,\", \",\") newPoem", "1).strip() if \"pl\" in word: v = en.noun.plural(v) string =", "= string + v + \" \" elif \"fruit\" in", "breaks = 0 if beforeFirstBreak or word == \"i\" or", "dontbreak == word.lower(): isgood = False if isgood: poem2.append(\"newline\") if", "list(set(puncuation)): if punc in word: capitalize = True poem3.append(word) if", "v = self.generate(\"<theme-adj>\", 1).strip() else: v = self.generate(word, 1).strip() string", "in self.poemtype: v = self.generate(\"<pverb>\", 1).strip() elif \"nverb\" in word:", "= False for word in poem.replace(\"\\n\", \"newline\").split(): poem2.append(word.lower()) if random.randint(1,", "\"are\", \"become\", \"newline\"] capitalize = False breaks = 0 poem2", "in gram[i].split(): if \"<\" not in word: string = string", "bnfDictionary: def __init__(self, file): self.grammar = yaml.load(open(file,'r')) self.poemtype = \"<poem>\"", "in word: v = self.generate(\"<fruit>\", 1).strip() if \"pl\" in word:", "word: v = en.noun.plural(v) string = string + v +", "if dontbreak == word.lower(): isgood = False if isgood: poem3.append(random.choice(puncuation))", "else: v = self.generate(word, 1).strip() string = string + v", "if capitalize == True and \"newline\" not in word: word", "# else: # v = self.generate(\"<verb>\", 1).strip() if random.randint(1, 100)", "\"nnoun\" in word: v = self.generate(\"<nnoun>\", 1).strip() else: v =", "word = word.capitalize() capitalize = False for punc in list(set(puncuation)):", "v = en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else: v = self.generate(word, 1) string =", "+ \" <br />\\n\" newPoem2 = newPoem2 + \"</p>\" return", "negate=False) + \" \" elif \"verb-past\" in word: string =", "\" else: string = string + v + \" \"", "newPoem.replace(\"newline ,\", \", newline\") newPoem = newPoem.replace(\"newline\", \"\\n\") newPoem =", "gram[i]: string = gram[i] else: for 
word in gram[i].split(): if", "newPoem2 + line + \" \\n\" if firstLine and secondLine:", "in re.sub(\"<.*?>\", \" \", p).split(\"\\n\"): if len(line.strip()) > 0: filtered.append(line.strip())", "except: print(\"DOWNLOD NODECUBE\") print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip unzip linguistics.zip\"\"\") VERSION = \"1.1\"", "\"fruit\" in word: v = self.generate(\"<fruit>\", 1).strip() if \"pl\" in", "newPoem = newPoem.replace(\"..\", \".\") title = newPoem.split(\"\\n\")[0] newTitle = title.replace(\".\",", "p,seed_str=generate_poem(poemtype) print((\"*\"*30 + \"\\n\"*5)) filtered = [] for line in", "in word: word = word.capitalize() capitalize = False for punc", "+ \" \" elif \"verb-past\" in word: string = string", "for line in newPoem.split(\"\\n\"): if len(line) > 0: if firstLine", "newPoem.replace(\"newline .\", \". newline\") newPoem = newPoem.replace(\"newline ?\", \"? newline\")", "= random.randint(0, len(gram) - 1) string = \"\" if \"<\"", "= newPoem2 + \"</p>\" return newPoem2,seed_str bnf = bnfDictionary('brain.yaml') def", "seed_str = str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool = language_check.LanguageTool('en-US') self.poemtype = key", "newPoem.replace(\"!.\", \"!\") newPoem = newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\")", "newPoem = newPoem.replace(\"newline .\", \". 
newline\") newPoem = newPoem.replace(\"newline ?\",", "'>',hex_seed) return p,seed_str if __name__ == '__main__': poemtype = 'poem'", "= 'mushypoem' p,seed_str=generate_poem(poemtype) print((\"*\"*30 + \"\\n\"*5)) filtered = [] for", "print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip unzip linguistics.zip\"\"\") VERSION = \"1.1\" THEME_PROB = 0", "unzip linguistics.zip\"\"\") VERSION = \"1.1\" THEME_PROB = 0 class bnfDictionary:", "= \"\" if \"<\" not in gram[i]: string = gram[i]", "= gram[i] else: for word in gram[i].split(): if \"<\" not", "in word: v = self.generate(\"<nverb>\", 1).strip() # else: # v", ",\", \",\") newPoem = newPoem.replace(\"?.\", \"?\") newPoem = newPoem.replace(\".?\", \".\")", "breaks = 0 poem2 = [] foundFirstBreak = False for", "\" \" else: string = string + v + \"", "1) + \" \" elif \"person\" in word: v =", "= newPoem.replace(\"newline .\", \". newline\") newPoem = newPoem.replace(\"newline ?\", \"?", "\"newline\").split(): poem2.append(word.lower()) if random.randint(1, 100) < 2 and \"newline\" not", "newPoem.split(\"\\n\"): if len(line) > 0: if firstLine and not secondLine:", "= string + \\ en.verb.present( v, person=3, negate=False) + \"", "poemtype = 'mushypoem' p,seed_str=generate_poem(poemtype) print((\"*\"*30 + \"\\n\"*5)) filtered = []", "== None: seed_str = str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool = language_check.LanguageTool('en-US') self.poemtype", "= newPoem.split(\"\\n\")[0] newTitle = title.replace(\".\", \"\") newPoem = newPoem.replace(title, \"<h1>\"", "\"why\", \"who\", \",\", \"your\", \"by\", \"like\", \"to\", \"you\", \"your\", \"a\",", "v = self.generate(\"<theme-verb>\", 1).strip() if \"verb-inf\" in word: string =", "and \"newline\" not in word: isgood = True for dontbreak", "# if noPunc: # poem3.append(random.choice(puncuation)) newPoem = \" \".join(poem3) newPoem", "newPoem2 = \"\" firstLine = False secondLine = False for", "in poem2: 
if \"newline\" in word: breaks += 1 beforeFirstBreak", "if punc in word: capitalize = True poem3.append(word) if random.randint(1,", "file): self.grammar = yaml.load(open(file,'r')) self.poemtype = \"<poem>\" def generate(self, key,", "= True if firstLine == False: firstLine = True newPoem2", "word: string = string + en.verb.past(v) + \" \" else:", "a\", \" an a\") newPoem = newPoem.replace(\"newline .\", \". newline\")", "\"mushy\" in self.poemtype: v = self.generate(\"<pverb>\", 1).strip() elif \"nverb\" in", "import absolute_import from __future__ import print_function import datetime import os", "if firstLine and not secondLine: newPoem2 = newPoem2 + \"<p>\\n\"", "+ \" \" return string def generatePretty(self, key, seed_str): if", "= string + en.verb.past(v) + \" \" else: string =", "word: breaks += 1 beforeFirstBreak = False else: breaks =", "\"\\n\") newPoem = newPoem.replace(\" \\n \\n\", \"\\n\\n\") newPoem = newPoem.replace(\"\\n", "\"newline\" not in word and foundFirstBreak: isgood = True for", "and \"newline\" not in word: word = word.capitalize() capitalize =", "in list(set(puncuation)): # if punc in word: # noPunc =", "poem2.append(word.lower()) if random.randint(1, 100) < 2 and \"newline\" not in", "\"what\", \"why\", \"who\", \",\", \"your\", \"by\", \"like\", \"to\", \"you\", \"your\",", "= self.generate(word, 1) string = string + v + \"", "\".\") newPoem = newPoem.replace(\",.\", \",\") newPoem = newPoem.replace(\"!.\", \"!\") newPoem", "word: capitalize = True poem3.append(word) if random.randint(1, 100) < 0", "generate(self, key, num): gram = self.grammar[key] if len(gram)==1: i =", "[] for line in re.sub(\"<.*?>\", \" \", p).split(\"\\n\"): if len(line.strip())", "+ \" \" elif \"person\" in word: v = self.generate(\"<fruit>\",", "for punc in list(set(puncuation)): # if punc in word: #", "self.generate(word, 1).strip() string = string + v + \" \"", "poem3.append(word) if random.randint(1, 100) < 0 and \"newline\" not in", "v, person=3, 
negate=False) + \" \" elif \"verb-past\" in word:", "v + \" \" elif \"person\" in word: v =", "= string + v + \" \" return string def", "v = self.generate(\"<nverb>\", 1).strip() # else: # v = self.generate(\"<verb>\",", "v = self.generate(\"<verb>\", 1).strip() if random.randint(1, 100) < THEME_PROB: v", "< THEME_PROB: v = self.generate(\"<theme-verb>\", 1).strip() if \"verb-inf\" in word:", "\" elif \"verb-past\" in word: string = string + en.verb.past(v)", "+ \" \" else: string = string + v +", "else: string = string + v + \" \" elif", "poem = poem.replace(\" ,\", \",\") puncuation = [\".\", \".\", \".\",", "not in word: word = word.capitalize() capitalize = False for", "1) string = string + v + \" \" return", "if dontbreak == word.lower(): isgood = False if isgood: poem2.append(\"newline\")", "= newPoem.replace(\"newline\", \"\\n\") newPoem = newPoem.replace(\" \\n \\n\", \"\\n\\n\") newPoem", "= self.generate(\"<pverb>\", 1).strip() elif \"nverb\" in word: v = self.generate(\"<nverb>\",", "if beforeFirstBreak or word == \"i\" or \"i'\" in word:", "= newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\",", "isgood: poem3.append(random.choice(puncuation)) capitalize = True # noPunc = True #", "v = self.generate(\"<pnoun>\", 1).strip() elif \"nnoun\" in word: v =", "\"! 
newline\") newPoem = newPoem.replace(\"newline ,\", \", newline\") newPoem =", "\".join(poem3) newPoem = newPoem.replace(\" a a\", \" an a\") newPoem", "elif \"person\" in word: v = self.generate(\"<person>\", 1).strip() if \"pl\"", "string + v + \" \" return string def generatePretty(self,", "= newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\") title = newPoem.split(\"\\n\")[0]", "word + \" \" else: if \"verb\" in word and", "newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\")", "return newPoem2,seed_str bnf = bnfDictionary('brain.yaml') def generate_poem(poemtype, hex_seed=None): p,seed_str =", "else: breaks = 0 if beforeFirstBreak or word == \"i\"", "self.generate(word, 1) + \" \" elif \"person\" in word: v", "import uuid import base64 import yaml import re try: import", "newPoem = newPoem.replace(\" \\n \\n\", \"\\n\\n\") newPoem = newPoem.replace(\"\\n \\n", "newPoem2 + \"<p>\\n\" secondLine = True if firstLine == False:", "capitalize = False breaks = 0 poem2 = [] foundFirstBreak", "poem2.append(\"newline\") if \"newline\" in word: foundFirstBreak = True poem3 =", "!\", \"! 
newline\") newPoem = newPoem.replace(\"newline ,\", \", newline\") newPoem", "0: if firstLine and not secondLine: newPoem2 = newPoem2 +", "NODECUBE\") print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip unzip linguistics.zip\"\"\") VERSION = \"1.1\" THEME_PROB =", "THEME_PROB: v = self.generate(\"<theme-noun>\", 1).strip() if \"pl\" in word: v", "not secondLine: newPoem2 = newPoem2 + \"<p>\\n\" secondLine = True", "dontbreak in list(dontbreaks + puncuation): if dontbreak == word.lower(): isgood", "punc in list(set(puncuation)): # if punc in word: # noPunc", "\",\") newPoem = newPoem.replace(\"?.\", \"?\") newPoem = newPoem.replace(\".?\", \".\") newPoem", "\"pnoun\" in word or \"mushy\" in self.poemtype: v = self.generate(\"<pnoun>\",", "yaml import re try: import en except: print(\"DOWNLOD NODECUBE\") print(\"\"\"wget", "elif \"verb-past\" in word: string = string + en.verb.past(v) +", "2 and \"newline\" not in word and foundFirstBreak: isgood =", "= True for word in poem2: if \"newline\" in word:", "en.verb.present_participle(v) + \" \" elif \"verb-pr\" in word: string =", "list(set(puncuation)): # if punc in word: # noPunc = False", "list(set(puncuation)): newPoem = newPoem.replace(\" \" + punc, punc) for punc", "> 1: capitalize = True if capitalize == True and", "puncuation): if dontbreak == word.lower(): isgood = False if isgood:", "\"!\", \"?\"] dontbreaks = [\"of\", \"behind\", \"the\", \"when\", \"what\", \"why\",", "newPoem2 = newPoem2 + \" <br />\\n\" newPoem2 = newPoem2", "\\n\", \"\\n\\n\") newPoem = newPoem.replace(\"\\n \\n \", \"\\n\\n\") newPoem =", "string = string + \\ en.verb.present( v, person=3, negate=False) +", "\"verb-inf\" in word: string = string + \\ en.verb.present_participle(v) +", "= self.generate(\"<theme-noun>\", 1).strip() if \"pl\" in word: v = en.noun.plural(v)", "self.generate(\"<theme-noun>\", 1).strip() if \"pl\" in word: v = en.noun.plural(v) string", "\\n\" if firstLine and secondLine: newPoem2 = newPoem2 
+ line", "THEME_PROB = 0 class bnfDictionary: def __init__(self, file): self.grammar =", "False for word in poem.replace(\"\\n\", \"newline\").split(): poem2.append(word.lower()) if random.randint(1, 100)", "newPoem = newPoem.replace(\".?\", \".\") newPoem = newPoem.replace(\",.\", \",\") newPoem =", "\".\", \".\", \".\", \"!\", \"?\"] dontbreaks = [\"of\", \"behind\", \"the\",", "random import sys import uuid import base64 import yaml import", "word in poem.replace(\"\\n\", \"newline\").split(): poem2.append(word.lower()) if random.randint(1, 100) < 2", "p,seed_str if __name__ == '__main__': poemtype = 'poem' if 'mushy'", "self.generate(\"<theme-adj>\", 1).strip() else: v = self.generate(word, 1).strip() string = string", "newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\") title = newPoem.split(\"\\n\")[0] newTitle", "gram = self.grammar[key] if len(gram)==1: i = 0 else: i", "import os import random import sys import uuid import base64", "= False secondLine = False for line in newPoem.split(\"\\n\"): if", "\"<p>\\n\" secondLine = True if firstLine == False: firstLine =", "or \"mushy\" in self.poemtype: v = self.generate(\"<pnoun>\", 1).strip() elif \"nnoun\"", "\". newline\") newPoem = newPoem.replace(\"newline ?\", \"? 
newline\") newPoem =", "for line in re.sub(\"<.*?>\", \" \", p).split(\"\\n\"): if len(line.strip()) >", "noPunc = False # if noPunc: # poem3.append(random.choice(puncuation)) newPoem =", "string + self.generate(word, 1) + \" \" elif \"person\" in", "\" \" elif \"person\" in word: v = self.generate(\"<fruit>\", 1).strip()", "elif \"nnoun\" in word: v = self.generate(\"<nnoun>\", 1).strip() else: v", "+= 1 beforeFirstBreak = False else: breaks = 0 if", "+ self.generate(word, 1) + \" \" elif \"person\" in word:", "datetime import os import random import sys import uuid import", "key == \"<mushypoem>\": key = \"<poem>\" poem = self.generate(key, 1)", "\" \\n\" if firstLine and secondLine: newPoem2 = newPoem2 +", "else: # v = self.generate(\"<verb>\", 1).strip() if random.randint(1, 100) <", "= string + v + \" \" elif \"person\" in", "key = \"<poem>\" poem = self.generate(key, 1) poem = poem.replace(\"", "= False if isgood: poem3.append(random.choice(puncuation)) capitalize = True # noPunc", "newPoem = newPoem.replace(\"newline ?\", \"? 
newline\") newPoem = newPoem.replace(\"newline !\",", "if \"<\" not in gram[i]: string = gram[i] else: for", "word: v = self.generate(\"<nnoun>\", 1).strip() else: v = self.generate(\"<noun>\", 1).strip()", "in word or \"mushy\" in self.poemtype: v = self.generate(\"<pnoun>\", 1).strip()", "True and \"newline\" not in word: word = word.capitalize() capitalize", "+ line + \" <br />\\n\" else: newPoem2 = newPoem2", "if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-noun>\", 1).strip() if", "newPoem2 + \" <br />\\n\" newPoem2 = newPoem2 + \"</p>\"", "self.generate(\"<verb>\", 1).strip() if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-verb>\",", "newPoem = newPoem.replace(\"?.\", \"?\") newPoem = newPoem.replace(\".?\", \".\") newPoem =", "capitalize == True and \"newline\" not in word: word =", "punc in list(set(puncuation)): newPoem = newPoem.replace(\" \" + punc, punc)", "1).strip() string = string + v + \" \" elif", "# if punc in word: # noPunc = False #", "= poem.replace(\" ,\", \",\") puncuation = [\".\", \".\", \".\", \".\",", "if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-verb>\", 1).strip() if", "== word.lower(): isgood = False if isgood: poem2.append(\"newline\") if \"newline\"", "string + self.generate(word, 1) + \" \" else: if \"-pl\"", "string def generatePretty(self, key, seed_str): if seed_str == None: seed_str", "'mushypoem' p,seed_str=generate_poem(poemtype) print((\"*\"*30 + \"\\n\"*5)) filtered = [] for line", "0 else: i = random.randint(0, len(gram) - 1) string =", "v = self.generate(\"<pverb>\", 1).strip() elif \"nverb\" in word: v =", "beforeFirstBreak or word == \"i\" or \"i'\" in word: word", "== True and \"newline\" not in word: word = word.capitalize()", "= newPoem.replace(\" \" + punc, punc) newPoem = newPoem.replace(\" ,\",", "\"a\", \"are\", \"become\", \"newline\"] capitalize = False breaks = 0", "\"<poem>\" poem = self.generate(key, 1) poem = poem.replace(\" ,\", \",\")", "if 
'mushy' in sys.argv[1:]: poemtype = 'mushypoem' p,seed_str=generate_poem(poemtype) print((\"*\"*30 +", "\".\") title = newPoem.split(\"\\n\")[0] newTitle = title.replace(\".\", \"\") newPoem =", "__init__(self, file): self.grammar = yaml.load(open(file,'r')) self.poemtype = \"<poem>\" def generate(self,", "\" elif \"adj\" in word: if \"mushy\" in self.poemtype: v", "/>\\n\" newPoem2 = newPoem2 + \"</p>\" return newPoem2,seed_str bnf =", "1).strip() if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-noun>\", 1).strip()", "newPoem.replace(\"newline\", \"\\n\") newPoem = newPoem.replace(\" \\n \\n\", \"\\n\\n\") newPoem =", "secondLine: newPoem2 = newPoem2 + \"<p>\\n\" secondLine = True if", "string = string + v + \" \" elif \"noun\"", "not in gram[i]: string = gram[i] else: for word in", "+ v + \" \" elif \"noun\" in word: if", "word: isgood = True for dontbreak in list(dontbreaks + puncuation):", "if \"verb\" in word and word != '<adverb>': if \"pverb\"", "word: string = string + \\ en.verb.present( v, person=3, negate=False)", "\" \" else: if \"verb\" in word and word !=", "\"person\" in word: v = self.generate(\"<fruit>\", 1).strip() if \"pl\" in", "newPoem.replace(\".?\", \".\") newPoem = newPoem.replace(\",.\", \",\") newPoem = newPoem.replace(\"!.\", \"!\")", "\" <br />\\n\" newPoem2 = newPoem2 + \"</p>\" return newPoem2,seed_str", "word: v = self.generate(\"<fruit>\", 1).strip() if \"pl\" in word: v", "False: firstLine = True newPoem2 = newPoem2 + line +", "1).strip() else: v = self.generate(\"<noun>\", 1).strip() if random.randint(1, 100) <", "or word == \"i\" or \"i'\" in word: word =", "\"-pl\" in word: v = en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else: v = self.generate(word,", "= en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else: v = self.generate(word, 1) string = string", "in word: foundFirstBreak = True poem3 = [] beforeFirstBreak =", "breaks += 1 beforeFirstBreak = False else: breaks = 0", "in word: 
string = string + word + \" \"", "self.generate(\"<noun>\", 1).strip() if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-noun>\",", "= [\".\", \".\", \".\", \".\", \"!\", \"?\"] dontbreaks = [\"of\",", "title.replace(\".\", \"\") newPoem = newPoem.replace(title, \"<h1>\" + newTitle + \"</h1>\")", "newPoem2 = newPoem2 + line + \" <br />\\n\" else:", "100) < THEME_PROB: v = self.generate(\"<theme-adj>\", 1).strip() else: v =", "if \"newline\" in word: foundFirstBreak = True poem3 = []", "list(set(puncuation)): newPoem = newPoem.replace(\" \" + punc, punc) newPoem =", "= \"1.1\" THEME_PROB = 0 class bnfDictionary: def __init__(self, file):", "1).strip() if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-verb>\", 1).strip()", "else: v = self.generate(word, 1) string = string + v", "capitalize = True # noPunc = True # for punc", "= newPoem2 + \"<p>\\n\" secondLine = True if firstLine ==", "newPoem2 + line + \" <br />\\n\" else: newPoem2 =", "if firstLine == False: firstLine = True newPoem2 = newPoem2", "\", newline\") newPoem = newPoem.replace(\"newline\", \"\\n\") newPoem = newPoem.replace(\" \\n", "a a\", \" an a\") newPoem = newPoem.replace(\"newline .\", \".", "else: if \"verb\" in word and word != '<adverb>': if", "False if isgood: poem2.append(\"newline\") if \"newline\" in word: foundFirstBreak =", "and \"newline\" not in word and foundFirstBreak: isgood = True", "\".\", \".\", \"!\", \"?\"] dontbreaks = [\"of\", \"behind\", \"the\", \"when\",", "import en except: print(\"DOWNLOD NODECUBE\") print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip unzip linguistics.zip\"\"\") VERSION", "linguistics.zip\"\"\") VERSION = \"1.1\" THEME_PROB = 0 class bnfDictionary: def", "for word in poem.replace(\"\\n\", \"newline\").split(): poem2.append(word.lower()) if random.randint(1, 100) <", "\"newline\" in word: foundFirstBreak = True poem3 = [] beforeFirstBreak", "word: v = self.generate(\"<person>\", 1).strip() if \"pl\" in 
word: v", ",\", \",\") puncuation = [\".\", \".\", \".\", \".\", \"!\", \"?\"]", "p,seed_str = bnf.generatePretty('<' + poemtype + '>',hex_seed) return p,seed_str if", "secondLine: newPoem2 = newPoem2 + line + \" <br />\\n\"", "= newPoem2 + line + \" <br />\\n\" else: newPoem2", "if \"-pl\" in word: v = en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else: v =", "word != '<adverb>': if \"pverb\" in word or \"mushy\" in", "True newPoem2 = newPoem2 + line + \" \\n\" if", "self.generate(\"<fruit>\", 1).strip() if \"pl\" in word: v = en.noun.plural(v) string", "1) + \" \" else: if \"-pl\" in word: v", "in word and word != '<adverb>': if \"pverb\" in word", "v = self.generate(word, 1) string = string + v +", "string = string + word + \" \" else: if", "= self.generate(\"<fruit>\", 1).strip() if \"pl\" in word: v = en.noun.plural(v)", "random.randint(0, len(gram) - 1) string = \"\" if \"<\" not", "if random.randint(1, 100) < 2 and \"newline\" not in word", "100) < 0 and \"newline\" not in word: isgood =", "newPoem.replace(\"newline !\", \"! 
newline\") newPoem = newPoem.replace(\"newline ,\", \", newline\")", "newPoem2 = newPoem2 + \"</p>\" return newPoem2,seed_str bnf = bnfDictionary('brain.yaml')", "in word: capitalize = True poem3.append(word) if random.randint(1, 100) <", "or \"mushy\" in self.poemtype: v = self.generate(\"<pverb>\", 1).strip() elif \"nverb\"", "i = random.randint(0, len(gram) - 1) string = \"\" if", "if isgood: poem3.append(random.choice(puncuation)) capitalize = True # noPunc = True", ",\", \", newline\") newPoem = newPoem.replace(\"newline\", \"\\n\") newPoem = newPoem.replace(\"", "= self.generate(\"<noun>\", 1).strip() if random.randint(1, 100) < THEME_PROB: v =", "firstLine and secondLine: newPoem2 = newPoem2 + line + \"", "\"<\" not in gram[i]: string = gram[i] else: for word", "newPoem = newPoem.replace(title, \"<h1>\" + newTitle + \"</h1>\") newPoem2 =", "if isgood: poem2.append(\"newline\") if \"newline\" in word: foundFirstBreak = True", "= bnfDictionary('brain.yaml') def generate_poem(poemtype, hex_seed=None): p,seed_str = bnf.generatePretty('<' + poemtype", "string = string + v + \" \" elif \"fruit\"", "< THEME_PROB: v = self.generate(\"<theme-adj>\", 1).strip() else: v = self.generate(word,", "language_check.LanguageTool('en-US') self.poemtype = key if key == \"<mushypoem>\": key =", "= False if isgood: poem2.append(\"newline\") if \"newline\" in word: foundFirstBreak", "True poem3 = [] beforeFirstBreak = True for word in", "self.poemtype = key if key == \"<mushypoem>\": key = \"<poem>\"", "list(dontbreaks + puncuation): if dontbreak == word.lower(): isgood = False", "elif \"nverb\" in word: v = self.generate(\"<nverb>\", 1).strip() # else:", "newPoem2,seed_str bnf = bnfDictionary('brain.yaml') def generate_poem(poemtype, hex_seed=None): p,seed_str = bnf.generatePretty('<'", "newPoem = newPoem.replace(\" ,\", \",\") newPoem = newPoem.replace(\"?.\", \"?\") newPoem", "+ \" <br />\\n\" else: newPoem2 = newPoem2 + \"", "- 1) string = \"\" if \"<\" not in gram[i]:", 
"en.noun.plural(v) string = string + self.generate(word, 1) + \" \"", "dontbreaks = [\"of\", \"behind\", \"the\", \"when\", \"what\", \"why\", \"who\", \",\",", "if \"newline\" in word: breaks += 1 beforeFirstBreak = False", "import random import sys import uuid import base64 import yaml", "self.generate(\"<pnoun>\", 1).strip() elif \"nnoun\" in word: v = self.generate(\"<nnoun>\", 1).strip()", "in word: if \"pnoun\" in word or \"mushy\" in self.poemtype:", "__future__ import print_function import datetime import os import random import", "if \"mushy\" in self.poemtype: v = self.generate(\"<padj>\",1) else: if random.randint(1,", "= False # if noPunc: # poem3.append(random.choice(puncuation)) newPoem = \"", "or \"i'\" in word: word = word.capitalize() poem3.append(word) capitalize =", "if punc in word: # noPunc = False # if", "\" an a\") newPoem = newPoem.replace(\"newline .\", \". newline\") newPoem", "\"<poem>\" def generate(self, key, num): gram = self.grammar[key] if len(gram)==1:", "\"newline\" not in word: isgood = True for dontbreak in", "self.grammar[key] if len(gram)==1: i = 0 else: i = random.randint(0,", "try: import en except: print(\"DOWNLOD NODECUBE\") print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip unzip linguistics.zip\"\"\")", "\",\") puncuation = [\".\", \".\", \".\", \".\", \"!\", \"?\"] dontbreaks", "in list(set(puncuation)): newPoem = newPoem.replace(\" \" + punc, punc) newPoem", "= False else: breaks = 0 if beforeFirstBreak or word", "string + v + \" \" elif \"adj\" in word:", "generatePretty(self, key, seed_str): if seed_str == None: seed_str = str(uuid.uuid4()).split(\"-\")[0]", "self.generate(\"<theme-verb>\", 1).strip() if \"verb-inf\" in word: string = string +", "string = gram[i] else: for word in gram[i].split(): if \"<\"", "string = \"\" if \"<\" not in gram[i]: string =", "if breaks > 1: capitalize = True if capitalize ==", "\"who\", \",\", \"your\", \"by\", \"like\", \"to\", \"you\", \"your\", \"a\", 
\"are\",", "word: word = word.capitalize() poem3.append(word) capitalize = False else: if", "= True if capitalize == True and \"newline\" not in", "\"person\" in word: v = self.generate(\"<person>\", 1).strip() if \"pl\" in", "uuid import base64 import yaml import re try: import en", "if seed_str == None: seed_str = str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool =", "self.poemtype: v = self.generate(\"<pverb>\", 1).strip() elif \"nverb\" in word: v", "\"to\", \"you\", \"your\", \"a\", \"are\", \"become\", \"newline\"] capitalize = False", "in list(dontbreaks + puncuation): if dontbreak == word.lower(): isgood =", "= newPoem2 + line + \" \\n\" if firstLine and", "< 2 and \"newline\" not in word and foundFirstBreak: isgood", "= word.capitalize() poem3.append(word) capitalize = False else: if breaks >", "def generate(self, key, num): gram = self.grammar[key] if len(gram)==1: i", "punc in word: capitalize = True poem3.append(word) if random.randint(1, 100)", "string = string + \\ en.verb.present_participle(v) + \" \" elif", "1).strip() elif \"nnoun\" in word: v = self.generate(\"<nnoun>\", 1).strip() else:", "self.generate(\"<person>\", 1).strip() if \"pl\" in word: v = en.noun.plural(v) string", "\"mushy\" in self.poemtype: v = self.generate(\"<padj>\",1) else: if random.randint(1, 100)", "\" \" elif \"verb-pr\" in word: string = string +", "\"when\", \"what\", \"why\", \"who\", \",\", \"your\", \"by\", \"like\", \"to\", \"you\",", "newPoem.replace(\",.\", \",\") newPoem = newPoem.replace(\"!.\", \"!\") newPoem = newPoem.replace(\"..\", \".\")", "+ line + \" \\n\" if firstLine and secondLine: newPoem2", "noPunc = True # for punc in list(set(puncuation)): # if", "and word != '<adverb>': if \"pverb\" in word or \"mushy\"", "\"you\", \"your\", \"a\", \"are\", \"become\", \"newline\"] capitalize = False breaks", "\"mushy\" in self.poemtype: v = self.generate(\"<pnoun>\", 1).strip() elif \"nnoun\" in", "+ v + \" \" elif 
\"fruit\" in word: v", "noPunc: # poem3.append(random.choice(puncuation)) newPoem = \" \".join(poem3) newPoem = newPoem.replace(\"", "+ \" \" else: if \"-pl\" in word: v =", "sys.argv[1:]: poemtype = 'mushypoem' p,seed_str=generate_poem(poemtype) print((\"*\"*30 + \"\\n\"*5)) filtered =", "elif \"person\" in word: v = self.generate(\"<fruit>\", 1).strip() if \"pl\"", "string = string + v + \" \" return string", "if len(gram)==1: i = 0 else: i = random.randint(0, len(gram)", "in word or \"mushy\" in self.poemtype: v = self.generate(\"<pverb>\", 1).strip()", "False if isgood: poem3.append(random.choice(puncuation)) capitalize = True # noPunc =", "import re try: import en except: print(\"DOWNLOD NODECUBE\") print(\"\"\"wget https://www.nodebox.net/code/data/media/linguistics.zip", "punc, punc) for punc in list(set(puncuation)): newPoem = newPoem.replace(\" \"", "string = string + v + \" \" elif \"adj\"", "0 and \"newline\" not in word: isgood = True for", "newPoem = newPoem.replace(\" '\", \"'\") for punc in list(set(puncuation)): newPoem", "\"the\", \"when\", \"what\", \"why\", \"who\", \",\", \"your\", \"by\", \"like\", \"to\",", "+ \" \" elif \"noun\" in word: if \"pnoun\" in", "'__main__': poemtype = 'poem' if 'mushy' in sys.argv[1:]: poemtype =", "+ \" \" else: if \"verb\" in word and word", "newPoem = newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\") newPoem =", "str(uuid.uuid4()).split(\"-\")[0] random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool = language_check.LanguageTool('en-US') self.poemtype = key if key", "+ v + \" \" elif \"adj\" in word: if", "word.lower(): isgood = False if isgood: poem2.append(\"newline\") if \"newline\" in", "\" return string def generatePretty(self, key, seed_str): if seed_str ==", "newPoem.replace(\" '\", \"'\") for punc in list(set(puncuation)): newPoem = newPoem.replace(\"", "\" elif \"person\" in word: v = self.generate(\"<person>\", 1).strip() if", "newPoem = newPoem.replace(\",.\", \",\") 
newPoem = newPoem.replace(\"!.\", \"!\") newPoem =", "+ punc, punc) newPoem = newPoem.replace(\" ,\", \",\") newPoem =", "en.verb.present( v, person=3, negate=False) + \" \" elif \"verb-past\" in", "[] foundFirstBreak = False for word in poem.replace(\"\\n\", \"newline\").split(): poem2.append(word.lower())", "= yaml.load(open(file,'r')) self.poemtype = \"<poem>\" def generate(self, key, num): gram", "= string + word + \" \" else: if \"verb\"", "0 poem2 = [] foundFirstBreak = False for word in", "poemtype + '>',hex_seed) return p,seed_str if __name__ == '__main__': poemtype", "\"?\") newPoem = newPoem.replace(\".?\", \".\") newPoem = newPoem.replace(\",.\", \",\") newPoem", "= newPoem.replace(\"\\n \\n \", \"\\n\\n\") newPoem = newPoem.replace(\" '\", \"'\")", "newPoem.replace(\"\\n \\n \", \"\\n\\n\") newPoem = newPoem.replace(\" '\", \"'\") for", "base64 import yaml import re try: import en except: print(\"DOWNLOD", "1).strip() if \"verb-inf\" in word: string = string + \\", "word: if \"mushy\" in self.poemtype: v = self.generate(\"<padj>\",1) else: if", "random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int) #tool = language_check.LanguageTool('en-US') self.poemtype = key if key ==", "key if key == \"<mushypoem>\": key = \"<poem>\" poem =", "\"i\" or \"i'\" in word: word = word.capitalize() poem3.append(word) capitalize", "string + v + \" \" elif \"person\" in word:", "== '__main__': poemtype = 'poem' if 'mushy' in sys.argv[1:]: poemtype", "\"\\n\\n\") newPoem = newPoem.replace(\"\\n \\n \", \"\\n\\n\") newPoem = newPoem.replace(\"", "and secondLine: newPoem2 = newPoem2 + line + \" <br", "firstLine = True newPoem2 = newPoem2 + line + \"", "gram[i] else: for word in gram[i].split(): if \"<\" not in", "beforeFirstBreak = False else: breaks = 0 if beforeFirstBreak or", "'mushy' in sys.argv[1:]: poemtype = 'mushypoem' p,seed_str=generate_poem(poemtype) print((\"*\"*30 + \"\\n\"*5))", "= newPoem.replace(\"?.\", \"?\") newPoem = newPoem.replace(\".?\", \".\") 
newPoem = newPoem.replace(\",.\",", ".\", \". newline\") newPoem = newPoem.replace(\"newline ?\", \"? newline\") newPoem", "if \"pverb\" in word or \"mushy\" in self.poemtype: v =", "\"? newline\") newPoem = newPoem.replace(\"newline !\", \"! newline\") newPoem =", "len(line) > 0: if firstLine and not secondLine: newPoem2 =", "1).strip() else: v = self.generate(word, 1).strip() string = string +", "newPoem2 = newPoem2 + \"<p>\\n\" secondLine = True if firstLine", "and foundFirstBreak: isgood = True for dontbreak in list(dontbreaks +", "bnfDictionary('brain.yaml') def generate_poem(poemtype, hex_seed=None): p,seed_str = bnf.generatePretty('<' + poemtype +", "in word: v = self.generate(\"<person>\", 1).strip() if \"pl\" in word:", "+ \"<p>\\n\" secondLine = True if firstLine == False: firstLine", "punc) for punc in list(set(puncuation)): newPoem = newPoem.replace(\" \" +", "elif \"noun\" in word: if \"pnoun\" in word or \"mushy\"", "v = self.generate(\"<noun>\", 1).strip() if random.randint(1, 100) < THEME_PROB: v", "en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else: v = self.generate(word, 1) string = string +", "word.capitalize() poem3.append(word) capitalize = False else: if breaks > 1:", "poem.replace(\"\\n\", \"newline\").split(): poem2.append(word.lower()) if random.randint(1, 100) < 2 and \"newline\"", "= key if key == \"<mushypoem>\": key = \"<poem>\" poem", "newPoem = newPoem.replace(\" \" + punc, punc) for punc in", "word: if \"pnoun\" in word or \"mushy\" in self.poemtype: v", "if random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-adj>\", 1).strip() else:", "word.lower(): isgood = False if isgood: poem3.append(random.choice(puncuation)) capitalize = True", "= self.grammar[key] if len(gram)==1: i = 0 else: i =", "v = self.generate(\"<fruit>\", 1).strip() if \"pl\" in word: v =", "\" else: if \"-pl\" in word: v = en.noun.plural(self.generate(word.replace(\"-pl\",\"\"),1)) else:", "newPoem = newPoem.replace(\"!.\", \"!\") newPoem = 
newPoem.replace(\"..\", \".\") newPoem =", "'<adverb>': if \"pverb\" in word or \"mushy\" in self.poemtype: v", "= False breaks = 0 poem2 = [] foundFirstBreak =", "newline\") newPoem = newPoem.replace(\"newline ,\", \", newline\") newPoem = newPoem.replace(\"newline\",", "else: i = random.randint(0, len(gram) - 1) string = \"\"", "poemtype = 'poem' if 'mushy' in sys.argv[1:]: poemtype = 'mushypoem'", "random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-adj>\", 1).strip() else: v", "newPoem.replace(title, \"<h1>\" + newTitle + \"</h1>\") newPoem2 = \"\" firstLine", "def __init__(self, file): self.grammar = yaml.load(open(file,'r')) self.poemtype = \"<poem>\" def", "poem2: if \"newline\" in word: breaks += 1 beforeFirstBreak =", "True for dontbreak in list(dontbreaks + puncuation): if dontbreak ==", "word = word.capitalize() poem3.append(word) capitalize = False else: if breaks", "= \"\" firstLine = False secondLine = False for line", "False secondLine = False for line in newPoem.split(\"\\n\"): if len(line)", "False else: breaks = 0 if beforeFirstBreak or word ==", "False for line in newPoem.split(\"\\n\"): if len(line) > 0: if", "in poem.replace(\"\\n\", \"newline\").split(): poem2.append(word.lower()) if random.randint(1, 100) < 2 and", "self.poemtype: v = self.generate(\"<pnoun>\", 1).strip() elif \"nnoun\" in word: v", "word: word = word.capitalize() capitalize = False for punc in", "\"</h1>\") newPoem2 = \"\" firstLine = False secondLine = False", "v + \" \" elif \"noun\" in word: if \"pnoun\"", "return string def generatePretty(self, key, seed_str): if seed_str == None:", "https://www.nodebox.net/code/data/media/linguistics.zip unzip linguistics.zip\"\"\") VERSION = \"1.1\" THEME_PROB = 0 class", "for word in gram[i].split(): if \"<\" not in word: string", "random.randint(1, 100) < THEME_PROB: v = self.generate(\"<theme-noun>\", 1).strip() if \"pl\"", "[\"of\", \"behind\", \"the\", \"when\", \"what\", \"why\", \"who\", \",\", \"your\", \"by\",", 
"\"\" firstLine = False secondLine = False for line in", "self.generate(\"<pverb>\", 1).strip() elif \"nverb\" in word: v = self.generate(\"<nverb>\", 1).strip()", "\"!\") newPoem = newPoem.replace(\"..\", \".\") newPoem = newPoem.replace(\"..\", \".\") newPoem", "string + \\ en.verb.present( v, person=3, negate=False) + \" \"", "self.generate(word, 1) string = string + v + \" \"", "True # for punc in list(set(puncuation)): # if punc in", "# poem3.append(random.choice(puncuation)) newPoem = \" \".join(poem3) newPoem = newPoem.replace(\" a", "\"adj\" in word: if \"mushy\" in self.poemtype: v = self.generate(\"<padj>\",1)", "= [] for line in re.sub(\"<.*?>\", \" \", p).split(\"\\n\"): if", "+ \\ en.verb.present( v, person=3, negate=False) + \" \" elif", "string = string + self.generate(word, 1) + \" \" elif", "self.poemtype = \"<poem>\" def generate(self, key, num): gram = self.grammar[key]", "print_function import datetime import os import random import sys import", "import sys import uuid import base64 import yaml import re", "if noPunc: # poem3.append(random.choice(puncuation)) newPoem = \" \".join(poem3) newPoem =", "title = newPoem.split(\"\\n\")[0] newTitle = title.replace(\".\", \"\") newPoem = newPoem.replace(title,", "gram[i].split(): if \"<\" not in word: string = string +", "\"<mushypoem>\": key = \"<poem>\" poem = self.generate(key, 1) poem =", "newPoem.replace(\"..\", \".\") title = newPoem.split(\"\\n\")[0] newTitle = title.replace(\".\", \"\") newPoem", "+ newTitle + \"</h1>\") newPoem2 = \"\" firstLine = False", "isgood: poem2.append(\"newline\") if \"newline\" in word: foundFirstBreak = True poem3", "1: capitalize = True if capitalize == True and \"newline\"", "if len(line) > 0: if firstLine and not secondLine: newPoem2", "#tool = language_check.LanguageTool('en-US') self.poemtype = key if key == \"<mushypoem>\":", "in word: breaks += 1 beforeFirstBreak = False else: breaks", "__name__ == '__main__': poemtype = 'poem' if 'mushy' in sys.argv[1:]:", 
"\"<h1>\" + newTitle + \"</h1>\") newPoem2 = \"\" firstLine =", "THEME_PROB: v = self.generate(\"<theme-verb>\", 1).strip() if \"verb-inf\" in word: string" ]
[ "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "may obtain # a copy of the License at #", "agreed to in writing, software # distributed under the License", "Unless required by applicable law or agreed to in writing,", "paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots,", "import _proxy from openstack.block_storage.v2 import snapshot from openstack.block_storage.v2 import stats", "self.proxy = _proxy.Proxy(self.session) def test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot) def test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots,", "test_proxy_base class TestVolumeProxy(test_proxy_base.TestProxyBase): def setUp(self): super(TestVolumeProxy, self).setUp() self.proxy = _proxy.Proxy(self.session)", "distributed under the License is distributed on an \"AS IS\"", "openstack.block_storage.v2 import _proxy from openstack.block_storage.v2 import snapshot from openstack.block_storage.v2 import", "self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot) def test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail, paginated=True, method_kwargs={\"details\": True, \"query\":", "expected_kwargs={\"query\": 1}) def test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes, volume.Volume, paginated=True, method_kwargs={\"details\": False, \"query\":", "False, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot) def", "License, Version 2.0 (the \"License\"); you may # not use", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_volume_create_attrs(self): self.verify_create(self.proxy.create_volume,", "def test_type_delete(self): self.verify_delete(self.proxy.delete_type, type.Type, False) def test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type, type.Type, True)", "obtain # a copy of the License at # #", "under the License. from openstack.block_storage.v2 import _proxy from openstack.block_storage.v2 import", "import snapshot from openstack.block_storage.v2 import stats from openstack.block_storage.v2 import type", "def test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot) def test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail, paginated=True, method_kwargs={\"details\":", "applicable law or agreed to in writing, software # distributed", "from openstack.block_storage.v2 import stats from openstack.block_storage.v2 import type from openstack.block_storage.v2", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, True) def test_type_get(self): self.verify_get(self.proxy.get_type, type.Type) def test_types(self): self.verify_list(self.proxy.types,", "True, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes, volume.Volume, paginated=True,", "False) def test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, True) def test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume,", "type.Type, True) def test_volume_get(self): self.verify_get(self.proxy.get_volume, volume.Volume) def test_volumes_detailed(self): self.verify_list(self.proxy.volumes, volume.VolumeDetail,", "Version 2.0 (the \"License\"); you may # not use this", "specific 
language governing permissions and limitations # under the License.", "# not use this file except in compliance with the", "not use this file except in compliance with the License.", "OF ANY KIND, either express or implied. See the #", "self).setUp() self.proxy = _proxy.Proxy(self.session) def test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot) def test_snapshots_detailed(self):", "1}, expected_kwargs={\"query\": 1}) def test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot) def test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot,", "def setUp(self): super(TestVolumeProxy, self).setUp() self.proxy = _proxy.Proxy(self.session) def test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot,", "1}, expected_kwargs={\"query\": 1}) def test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.Snapshot, paginated=True, method_kwargs={\"details\": False,", "writing, software # distributed under the License is distributed on", "import type from openstack.block_storage.v2 import volume from openstack.tests.unit import test_proxy_base", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "in writing, software # distributed under the License is distributed", "stats from openstack.block_storage.v2 import type from openstack.block_storage.v2 import volume from", "= _proxy.Proxy(self.session) def test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot) def test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail,", "in compliance with the License. 
You may obtain # a", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "License for the specific language governing permissions and limitations #", "test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume, method_args=[\"value\", \"new-size\"], expected_args=[\"new-size\"]) def test_backend_pools(self): self.verify_list(self.proxy.backend_pools, stats.Pools,", "test_volumes_detailed(self): self.verify_list(self.proxy.volumes, volume.VolumeDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1})", "snapshot from openstack.block_storage.v2 import stats from openstack.block_storage.v2 import type from", "the License. You may obtain # a copy of the", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "use this file except in compliance with the License. You", "snapshot.Snapshot) def test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, False) def test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot,", "You may obtain # a copy of the License at", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "False) def test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, True) def test_type_get(self): self.verify_get(self.proxy.get_type, type.Type)", "volume.VolumeDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_volumes_not_detailed(self):", "False) def test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type, type.Type, True) def test_volume_get(self): self.verify_get(self.proxy.get_volume, volume.Volume)", "test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot) def 
test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, False) def test_snapshot_delete_ignore(self):", "volume from openstack.tests.unit import test_proxy_base class TestVolumeProxy(test_proxy_base.TestProxyBase): def setUp(self): super(TestVolumeProxy,", "test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.Snapshot, paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1})", "test_type_get(self): self.verify_get(self.proxy.get_type, type.Type) def test_types(self): self.verify_list(self.proxy.types, type.Type, paginated=False) def test_type_create_attrs(self):", "1}) def test_volume_create_attrs(self): self.verify_create(self.proxy.create_volume, volume.Volume) def test_volume_delete(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, False)", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "def test_type_create_attrs(self): self.verify_create(self.proxy.create_type, type.Type) def test_type_delete(self): self.verify_delete(self.proxy.delete_type, type.Type, False) def", "from openstack.block_storage.v2 import _proxy from openstack.block_storage.v2 import snapshot from openstack.block_storage.v2", "\"query\": 1}, expected_kwargs={\"query\": 1}) def test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes, volume.Volume, paginated=True, method_kwargs={\"details\":", "volume.Volume) def test_volume_delete(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, False) def test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume, volume.Volume,", "self.verify_get(self.proxy.get_volume, volume.Volume) def test_volumes_detailed(self): self.verify_list(self.proxy.volumes, volume.VolumeDetail, paginated=True, method_kwargs={\"details\": True, \"query\":", "type.Type) def test_types(self): self.verify_list(self.proxy.types, type.Type, paginated=False) def 
test_type_create_attrs(self): self.verify_create(self.proxy.create_type, type.Type)", "self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot) def test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, False) def test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot,", "test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes, volume.Volume, paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1})", "test_type_create_attrs(self): self.verify_create(self.proxy.create_type, type.Type) def test_type_delete(self): self.verify_delete(self.proxy.delete_type, type.Type, False) def test_type_delete_ignore(self):", "True) def test_volume_get(self): self.verify_get(self.proxy.get_volume, volume.Volume) def test_volumes_detailed(self): self.verify_list(self.proxy.volumes, volume.VolumeDetail, paginated=True,", "either express or implied. See the # License for the", "volume.Volume, True) def test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume, method_args=[\"value\", \"new-size\"], expected_args=[\"new-size\"]) def", "type.Type, paginated=False) def test_type_create_attrs(self): self.verify_create(self.proxy.create_type, type.Type) def test_type_delete(self): self.verify_delete(self.proxy.delete_type, type.Type,", "under the License is distributed on an \"AS IS\" BASIS,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "_proxy from openstack.block_storage.v2 import snapshot from openstack.block_storage.v2 import stats from", "test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot) def test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail, paginated=True, method_kwargs={\"details\": True,", "may # not use this file 
except in compliance with", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "1}) def test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.Snapshot, paginated=True, method_kwargs={\"details\": False, \"query\": 1},", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "with the License. You may obtain # a copy of", "KIND, either express or implied. See the # License for", "# License for the specific language governing permissions and limitations", "permissions and limitations # under the License. from openstack.block_storage.v2 import", "snapshot.Snapshot, False) def test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, True) def test_type_get(self): self.verify_get(self.proxy.get_type,", "def test_types(self): self.verify_list(self.proxy.types, type.Type, paginated=False) def test_type_create_attrs(self): self.verify_create(self.proxy.create_type, type.Type) def", "def test_volume_get(self): self.verify_get(self.proxy.get_volume, volume.Volume) def test_volumes_detailed(self): self.verify_list(self.proxy.volumes, volume.VolumeDetail, paginated=True, method_kwargs={\"details\":", "1}, expected_kwargs={\"query\": 1}) def test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes, volume.Volume, paginated=True, method_kwargs={\"details\": False,", "def test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\":", "you may # not use this file except in compliance", "\"License\"); you may # not use this file except in", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "snapshot.Snapshot, paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshot_create_attrs(self):", "test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots, 
snapshot.SnapshotDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1})", "express or implied. See the # License for the specific", "test_volume_delete(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, False) def test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, True) def", "this file except in compliance with the License. You may", "language governing permissions and limitations # under the License. from", "self.verify_list(self.proxy.volumes, volume.VolumeDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1}) def", "def test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume, method_args=[\"value\", \"new-size\"], expected_args=[\"new-size\"]) def test_backend_pools(self): self.verify_list(self.proxy.backend_pools,", "test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type, type.Type, True) def test_volume_get(self): self.verify_get(self.proxy.get_volume, volume.Volume) def test_volumes_detailed(self):", "compliance with the License. You may obtain # a copy", "the Apache License, Version 2.0 (the \"License\"); you may #", "import stats from openstack.block_storage.v2 import type from openstack.block_storage.v2 import volume", "def test_volume_create_attrs(self): self.verify_create(self.proxy.create_volume, volume.Volume) def test_volume_delete(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, False) def", "paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes,", "and limitations # under the License. 
from openstack.block_storage.v2 import _proxy", "import volume from openstack.tests.unit import test_proxy_base class TestVolumeProxy(test_proxy_base.TestProxyBase): def setUp(self):", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "TestVolumeProxy(test_proxy_base.TestProxyBase): def setUp(self): super(TestVolumeProxy, self).setUp() self.proxy = _proxy.Proxy(self.session) def test_snapshot_get(self):", "import test_proxy_base class TestVolumeProxy(test_proxy_base.TestProxyBase): def setUp(self): super(TestVolumeProxy, self).setUp() self.proxy =", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "test_types(self): self.verify_list(self.proxy.types, type.Type, paginated=False) def test_type_create_attrs(self): self.verify_create(self.proxy.create_type, type.Type) def test_type_delete(self):", "See the # License for the specific language governing permissions", "volume.Volume) def test_volumes_detailed(self): self.verify_list(self.proxy.volumes, volume.VolumeDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1},", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "type from openstack.block_storage.v2 import volume from openstack.tests.unit import test_proxy_base class", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "the # License for the specific language governing permissions and", "self.verify_delete(self.proxy.delete_type, type.Type, True) def test_volume_get(self): self.verify_get(self.proxy.get_volume, volume.Volume) def test_volumes_detailed(self): self.verify_list(self.proxy.volumes,", "# under the License. 
from openstack.block_storage.v2 import _proxy from openstack.block_storage.v2", "def test_type_get(self): self.verify_get(self.proxy.get_type, type.Type) def test_types(self): self.verify_list(self.proxy.types, type.Type, paginated=False) def", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "# # Unless required by applicable law or agreed to", "super(TestVolumeProxy, self).setUp() self.proxy = _proxy.Proxy(self.session) def test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot) def", "def test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type, type.Type, True) def test_volume_get(self): self.verify_get(self.proxy.get_volume, volume.Volume) def", "the License. from openstack.block_storage.v2 import _proxy from openstack.block_storage.v2 import snapshot", "self.verify_delete(self.proxy.delete_volume, volume.Volume, True) def test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume, method_args=[\"value\", \"new-size\"], expected_args=[\"new-size\"])", "test_type_delete(self): self.verify_delete(self.proxy.delete_type, type.Type, False) def test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type, type.Type, True) def", "test_volume_create_attrs(self): self.verify_create(self.proxy.create_volume, volume.Volume) def test_volume_delete(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, False) def test_volume_delete_ignore(self):", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "openstack.block_storage.v2 import type from openstack.block_storage.v2 import volume from openstack.tests.unit import", "snapshot.Snapshot) def test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1},", "expected_kwargs={\"query\": 1}) def test_volume_create_attrs(self): 
self.verify_create(self.proxy.create_volume, volume.Volume) def test_volume_delete(self): self.verify_delete(self.proxy.delete_volume, volume.Volume,", "file except in compliance with the License. You may obtain", "License. from openstack.block_storage.v2 import _proxy from openstack.block_storage.v2 import snapshot from", "paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot,", "type.Type, False) def test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type, type.Type, True) def test_volume_get(self): self.verify_get(self.proxy.get_volume,", "openstack.block_storage.v2 import volume from openstack.tests.unit import test_proxy_base class TestVolumeProxy(test_proxy_base.TestProxyBase): def", "\"query\": 1}, expected_kwargs={\"query\": 1}) def test_volume_create_attrs(self): self.verify_create(self.proxy.create_volume, volume.Volume) def test_volume_delete(self):", "self.verify_delete(self.proxy.delete_volume, volume.Volume, False) def test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, True) def test_volume_extend(self):", "setUp(self): super(TestVolumeProxy, self).setUp() self.proxy = _proxy.Proxy(self.session) def test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot)", "for the specific language governing permissions and limitations # under", "law or agreed to in writing, software # distributed under", "\"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.Snapshot, paginated=True, method_kwargs={\"details\":", "type.Type) def test_type_delete(self): self.verify_delete(self.proxy.delete_type, type.Type, False) def test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type, type.Type,", "True) def test_type_get(self): self.verify_get(self.proxy.get_type, 
type.Type) def test_types(self): self.verify_list(self.proxy.types, type.Type, paginated=False)", "OR CONDITIONS OF ANY KIND, either express or implied. See", "the specific language governing permissions and limitations # under the", "from openstack.block_storage.v2 import snapshot from openstack.block_storage.v2 import stats from openstack.block_storage.v2", "def test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, False) def test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, True)", "_proxy.Proxy(self.session) def test_snapshot_get(self): self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot) def test_snapshots_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail, paginated=True,", "def test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, True) def test_type_get(self): self.verify_get(self.proxy.get_type, type.Type) def", "from openstack.block_storage.v2 import type from openstack.block_storage.v2 import volume from openstack.tests.unit", "1}) def test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes, volume.Volume, paginated=True, method_kwargs={\"details\": False, \"query\": 1},", "1}, expected_kwargs={\"query\": 1}) def test_volume_create_attrs(self): self.verify_create(self.proxy.create_volume, volume.Volume) def test_volume_delete(self): self.verify_delete(self.proxy.delete_volume,", "under the Apache License, Version 2.0 (the \"License\"); you may", "snapshot.Snapshot, True) def test_type_get(self): self.verify_get(self.proxy.get_type, type.Type) def test_types(self): self.verify_list(self.proxy.types, type.Type,", "except in compliance with the License. 
You may obtain #", "2.0 (the \"License\"); you may # not use this file", "test_volume_get(self): self.verify_get(self.proxy.get_volume, volume.Volume) def test_volumes_detailed(self): self.verify_list(self.proxy.volumes, volume.VolumeDetail, paginated=True, method_kwargs={\"details\": True,", "implied. See the # License for the specific language governing", "def test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes, volume.Volume, paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\":", "self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume, method_args=[\"value\", \"new-size\"], expected_args=[\"new-size\"]) def test_backend_pools(self): self.verify_list(self.proxy.backend_pools, stats.Pools, paginated=False)", "test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, True) def test_type_get(self): self.verify_get(self.proxy.get_type, type.Type) def test_types(self):", "from openstack.block_storage.v2 import volume from openstack.tests.unit import test_proxy_base class TestVolumeProxy(test_proxy_base.TestProxyBase):", "self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1}) def", "License. 
You may obtain # a copy of the License", "from openstack.tests.unit import test_proxy_base class TestVolumeProxy(test_proxy_base.TestProxyBase): def setUp(self): super(TestVolumeProxy, self).setUp()", "openstack.tests.unit import test_proxy_base class TestVolumeProxy(test_proxy_base.TestProxyBase): def setUp(self): super(TestVolumeProxy, self).setUp() self.proxy", "self.verify_get(self.proxy.get_type, type.Type) def test_types(self): self.verify_list(self.proxy.types, type.Type, paginated=False) def test_type_create_attrs(self): self.verify_create(self.proxy.create_type,", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "ANY KIND, either express or implied. See the # License", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "def test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.Snapshot, paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\":", "# Unless required by applicable law or agreed to in", "test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, False) def test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, True) def", "self.verify_delete(self.proxy.delete_type, type.Type, False) def test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type, type.Type, True) def test_volume_get(self):", "self.verify_list(self.proxy.types, type.Type, paginated=False) def test_type_create_attrs(self): self.verify_create(self.proxy.create_type, type.Type) def test_type_delete(self): self.verify_delete(self.proxy.delete_type,", "def test_volumes_detailed(self): self.verify_list(self.proxy.volumes, volume.VolumeDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\":", "self.verify_list(self.proxy.volumes, 
volume.Volume, paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1}) def", "class TestVolumeProxy(test_proxy_base.TestProxyBase): def setUp(self): super(TestVolumeProxy, self).setUp() self.proxy = _proxy.Proxy(self.session) def", "volume.Volume, paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_volume_create_attrs(self):", "method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.Snapshot,", "openstack.block_storage.v2 import snapshot from openstack.block_storage.v2 import stats from openstack.block_storage.v2 import", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "openstack.block_storage.v2 import stats from openstack.block_storage.v2 import type from openstack.block_storage.v2 import", "def test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot) def test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, False) def", "to in writing, software # distributed under the License is", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "\"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot) def test_snapshot_delete(self):", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "expected_kwargs={\"query\": 1}) def test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot) def test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot,", "test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, True) def test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume, 
method_args=[\"value\", \"new-size\"],", "self.verify_list(self.proxy.snapshots, snapshot.Snapshot, paginated=True, method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1}) def", "method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot)", "expected_kwargs={\"query\": 1}) def test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.Snapshot, paginated=True, method_kwargs={\"details\": False, \"query\":", "or agreed to in writing, software # distributed under the", "False, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_volume_create_attrs(self): self.verify_create(self.proxy.create_volume, volume.Volume) def", "def test_volume_delete(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, False) def test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, True)", "governing permissions and limitations # under the License. 
from openstack.block_storage.v2", "self.verify_create(self.proxy.create_volume, volume.Volume) def test_volume_delete(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, False) def test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume,", "required by applicable law or agreed to in writing, software", "self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, False) def test_snapshot_delete_ignore(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, True) def test_type_get(self):", "self.verify_create(self.proxy.create_type, type.Type) def test_type_delete(self): self.verify_delete(self.proxy.delete_type, type.Type, False) def test_type_delete_ignore(self): self.verify_delete(self.proxy.delete_type,", "True, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshots_not_detailed(self): self.verify_list(self.proxy.snapshots, snapshot.Snapshot, paginated=True,", "paginated=False) def test_type_create_attrs(self): self.verify_create(self.proxy.create_type, type.Type) def test_type_delete(self): self.verify_delete(self.proxy.delete_type, type.Type, False)", "def test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, True) def test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume, method_args=[\"value\",", "1}) def test_snapshot_create_attrs(self): self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot) def test_snapshot_delete(self): self.verify_delete(self.proxy.delete_snapshot, snapshot.Snapshot, False)", "snapshot.SnapshotDetail, paginated=True, method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_snapshots_not_detailed(self):", "limitations # under the License. 
from openstack.block_storage.v2 import _proxy from", "method_kwargs={\"details\": False, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_volume_create_attrs(self): self.verify_create(self.proxy.create_volume, volume.Volume)", "volume.Volume, False) def test_volume_delete_ignore(self): self.verify_delete(self.proxy.delete_volume, volume.Volume, True) def test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\",", "True) def test_volume_extend(self): self._verify(\"openstack.block_storage.v2.volume.Volume.extend\", self.proxy.extend_volume, method_args=[\"value\", \"new-size\"], expected_args=[\"new-size\"]) def test_backend_pools(self):", "or implied. See the # License for the specific language", "Apache License, Version 2.0 (the \"License\"); you may # not", "method_kwargs={\"details\": True, \"query\": 1}, expected_kwargs={\"query\": 1}) def test_volumes_not_detailed(self): self.verify_list(self.proxy.volumes, volume.Volume," ]
[ "('<' in cpp_object_name) and (\"debug\" in cpp_object): matched = re.match(r\".*?(\\w+)\\W+$\",", "(content[i + 1] == \"/\"): is_finding_single_comment = True elif c", "None: self.__args.output = self.__args.file_path self.__config = yaml.load(open(self.__args.config)) old_base = self.__config[\"_base_\"]", "elif cpp_object_type == CppHeaderParser.CppVariable: if cpp_object[\"type\"] != \"return\": if cpp_object[\"static\"]:", "naming conventions of source code''' parser = argparse.ArgumentParser(description=description) parser.add_argument(\"-c\", \"--config\",", "if matched is None: # with normal \"function\" style matched", "cpp_object = dict() cpp_object[\"name\"] = cpp_object_name cpp_object[\"line_number\"] = -1 elif", "return False return True def _get_argument_name(self, an_argument): if isinstance(an_argument, six.string_types):", "cpp_object): matched = re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"]) if matched is not None:", "cpp_object[\"debug\"]) if matched is not None: cpp_object_name = matched.group(1) else:", "return # Parse union like names splitted = cpp_object_name.split() if", "# Verify Define Names for define_text in parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text)) #", "is not None): internal_predeclares = [\"class\", \"struct\", \"union\"] if amember[\"type\"]", "name[len(prefix):]: chars.append(\"\\\\s*\") if achar.isalnum(): chars.append(achar) else: chars.append(\"\\\\\") chars.append(achar) return \"operator%s\"", "output path, we default it to input file # path", "name result[\"parameters\"] = parameters return result def _is_special_method(self, amethod): if", "FIXME: We just ignored this situation: # Code Snippets: static", "self._validate_cpp_object(cpp_object) # Verify Define Names for define_text in parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text))", "for cpp_object in parsed_info.classes_order: self._validate_cpp_object(cpp_object) # Verify Struct Names for", 
"matched.group(2) is not None: parameter_names = matched.group(2).split(',') for parameter_name in", "not None: parameter_names = matched.group(2).split(',') for parameter_name in parameter_names: aparameter", "class_method_argument_re = \"struct_method_argument\" class_variant_re = \"struct_variant\" else: class_re = \"class\"", "self._source_lines = source_file.readlines() parsed_info = CppHeaderParser.CppHeader(self.__args.file_path) # Verify File Names", "cpp_object_name = cpp_object cpp_object = dict() cpp_object[\"name\"] = cpp_object_name cpp_object[\"line_number\"]", "\"filename\") def exec_(self): try: with open(self.__args.file_path, \"r\") as source_file: #", "\"/\"): is_finding_block_comment = False elif (c == \"/\") and (content[i", "cpp_object = CppFileName() cpp_object[\"name\"] = filename self._validate_cpp_object(cpp_object) # Verify Define", "self[\"name\"] = None self[\"line_number\"] = -1 class CppNamespace(dict): def __init__(self):", "Operator methods chars = [] for achar in name[len(prefix):]: chars.append(\"\\\\s*\")", "= '\\n'.join(rest_lines) code_lines = [] name_re = self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos =", "_validate_name(self, cpp_object, name_re): cpp_object_name = \"\" if isinstance(cpp_object, six.string_types): cpp_object_name", "if \"line_number\" in cpp_object: line_number = cpp_object[\"line_number\"] for amember in", "cpp_object[\"debug\"]) if matched.group(1) is not None: cpp_object[\"class\"] = matched.group(1) cpp_object[\"name\"]", "self._is_special_method(amethod): if ((amethod[\"name\"] != self._get_class_realname(cpp_object[\"name\"])) and (not amethod.get(\"constructor\", False)) and", "aparameter = CppDefineParameter() aparameter[\"name\"] = parameter_name.strip() parameters.append(aparameter) result = CppDefine()", "this file, but we should pass it, this # is", "function while True: # FIXME: Parse special case : \"struct", "= dict() avariant[\"name\"] = aname avariant[\"line_number\"] = 
cpp_method[\"line_number\"] self._validate_name(avariant, \"variant\")", "# Verify File Names filename = os.path.basename(self.__args.file_path) cpp_object = CppFileName()", "= re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"]) if matched is not None: cpp_object_name =", "self._validate_name(avariant, \"variant\") def _validate_name(self, cpp_object, name_re): cpp_object_name = \"\" if", "if ('<' in cpp_object_name) and (\"debug\" in cpp_object): matched =", "filename = os.path.basename(self.__args.file_path) cpp_object = CppFileName() cpp_object[\"name\"] = filename self._validate_cpp_object(cpp_object)", "= content.index(';', parameters_stop_pos + 1) if semicolonPos <= i: return;", "when parsing # the class. self._validate_name(cpp_object, \"global_variant\") elif cpp_object_type ==", "argparse import CppHeaderParser import re import sys import yaml import", "in cpp_object[\"name\"]: self._validate_name(cpp_object, \"function\") break if self._get_class_realname(cpp_object[\"class\"]) == cpp_object[\"name\"]: #", "Parse special case : \"struct RArraySize <T ( & )", "re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content) for aname in founded: avariant = dict() avariant[\"name\"]", "\"_base_\", \"function\": \"_base_\", \"variant\": \"_base_\", \"namespace\": \"_base_\", \"define\": \"_base_\", \"filename\":", "c = content[i] if is_finding_block_comment: # If finding block comment,", "Typdef Names for cpp_object in parsed_info.typedefs: self._validate_cpp_object(cpp_object) except SyntaxError as", "regex = r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched = re.search(regex, cpp_object[\"debug\"]) if matched.group(1) is", "= \"\" if isinstance(cpp_object, six.string_types): cpp_object_name = cpp_object cpp_object =", "stack.append(i) i += 1 first_i = i last_i = 0", "in parsed_info.classes_order: self._validate_cpp_object(cpp_object) # Verify Struct Names for cpp_object in", "is the CppHeaderParser's problem. 
print(str(e)) return 0 return 0 def", "for cpp_object in parsed_info.functions: self._validate_cpp_object(cpp_object) # Verify Class Names for", "for amember in cpp_object[\"properties\"][access_specifier]: is_skip_validate = False if (\"type\" in", "self._validate_cpp_object(cpp_object) except SyntaxError as e: print(str(e)) return 1 except CppHeaderParser.CppHeaderParser.CppParseError", "parse_define(self, adefine): matched = re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine) name = matched.group(1) parameters", "False return True def _get_cpp_method_re(self, name): prefix = \"operator\" if", "parser.add_argument(\"-c\", \"--config\", help=\"Configuration file path (In YAML format)\", required=True) parser.add_argument(\"-o\",", "\"variant\": \"_base_\", \"namespace\": \"_base_\", \"define\": \"_base_\", \"filename\": \"_base_\", # Special", "pass if is_need_reraise: raise for aparameter in amethod[\"parameters\"]: an_object =", "error_message)) def _get_class_realname(self, class_name): return re.match(r\"(\\w+).*\", class_name).group(1) def _validate_cpp_object(self, cpp_object):", "# Normal Define Name self._validate_name(cpp_object, \"define\") else: # Function Liked", "= cpp_object[\"line_number\"] for amember in cpp_object[\"values\"]: # Use parent line", "maybe macro invokes. 
# FIXME: We just ignored this situation:", "Verify Class Names for cpp_object in parsed_info.classes_order: self._validate_cpp_object(cpp_object) # Verify", "Names for define_text in parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text)) # Verify Function Names", "parsed_info.namespaces: cpp_object = CppNamespace() cpp_object[\"name\"] = namespace self._validate_cpp_object(cpp_object) # Verify", "override_table: base_name = override_table[name] my_config.update(self._get_config(base_name)) if name in self.__config: my_config.update(self.__config[name])", "self[\"parameters\"] = [] self[\"line_number\"] = -1 class CppDefineParameter(dict): def __init__(self):", "in amethod[\"parameters\"]: an_object = dict() an_object[\"line_number\"] = aparameter[\"line_number\"] if (aparameter[\"type\"].endswith(\"::*\")", "in [\"class\", \"struct\", \"union\"]: if not cpp_object[\"type\"].endswith(\"::\"): # Don't parse", "union like names splitted = cpp_object_name.split() if len(splitted) > 1:", "matched = re.match(self._get_config(name_re)[\"re\"], cpp_object_name) if matched is None: filename =", "is_need_reraise = False except SyntaxError: pass if is_need_reraise: raise for", "be parsed when parsing # the class. 
self._validate_name(cpp_object, \"global_variant\") elif", "c == \"\\n\": is_finding_single_comment = False elif (c == \"/\")", "file # path if self.__args.output is None: self.__args.output = self.__args.file_path", "self[\"name\"] = None self[\"line_number\"] = -1 class CppFileName(dict): def __init__(self):", "cpp_object): cpp_object_type = type(cpp_object) if cpp_object_type == CppDefine: if len(cpp_object[\"parameters\"])", "in name_re.split(\"_\")]), error_message) if self.__args.debug: traceback.print_stack() raise SyntaxError(\"%s:%s:error: Name '%s'", "\"_base_\", \"filename\": \"_base_\", # Special config use to define filename", "(c == \"/\") and (content[i + 1] == \"*\"): is_finding_block_comment", "class_name).group(1) def _validate_cpp_object(self, cpp_object): cpp_object_type = type(cpp_object) if cpp_object_type ==", "(c == \"*\") and (content[i + 1] == \"/\"): is_finding_block_comment", "== \"/\") and (content[i + 1] == \"*\"): is_finding_block_comment =", "\"namespace\": \"_base_\", \"define\": \"_base_\", \"filename\": \"_base_\", # Special config use", "def parse_define(self, adefine): matched = re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine) name = matched.group(1)", "if matched.group(1) is not None: cpp_object[\"class\"] = matched.group(1) cpp_object[\"name\"] =", "(\"type\" in amember) and (amember[\"type\"] is not None): internal_predeclares =", "start_line_index = cpp_method[\"line_number\"] - 1 # Extract cpp method codes", "_get_cpp_method_re(self, name): prefix = \"operator\" if not name.startswith(prefix): return re.escape(name)", "ignored. 
pass skipped_lines = cpp_method[\"line_number\"] + content.count(\"\\n\", 0, i) -", "cpp_object_name cpp_object[\"line_number\"] = -1 elif \"name\" in cpp_object: cpp_object_name =", "SyntaxError: pass if is_need_reraise: raise for aparameter in amethod[\"parameters\"]: an_object", "Code Snippets: static RSignal<void(int)> sReceived; if \"<\" not in cpp_object[\"name\"]:", "-1 if \"line_number\" in cpp_object: line_number = cpp_object[\"line_number\"] for amember", "\"<\" not in cpp_object[\"name\"]: self._validate_name(cpp_object, \"function\") break if self._get_class_realname(cpp_object[\"class\"]) ==", "source code''' parser = argparse.ArgumentParser(description=description) parser.add_argument(\"-c\", \"--config\", help=\"Configuration file path", "== CppHeaderParser.CppMethod: # Exclude \"main\" function while parsing global function", "content).span()[0] parameters_start_pos = content.index('(', name_start_pos) parameters_stop_pos = content.index(')', parameters_start_pos) stack", "<= i: return; except ValueError: # Not found a semicolon,", "an_object[\"name\"] = self._get_argument_name(aparameter) self._validate_name(an_object, class_method_argument_re) else: self._validate_name( {\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]}, \"typedef\")", "\"enum\": \"class\", \"enum_value\": \"define\", \"union\": \"struct\", } my_config = dict()", "return 1 except CppHeaderParser.CppHeaderParser.CppParseError as e: # CppHeaderParser can't parse", "\"*\") and (content[i + 1] == \"/\"): is_finding_block_comment = False", "stack[len(stack) - 1] i += 1 if len(stack) <= 0:", "is_finding_single_comment = True elif c == \"{\": stack.append(i) elif c", "class_re = \"class\" class_method_re = \"class_method\" class_method_argument_re = \"class_method_argument\" class_variant_re", "= re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"]) if matched is None: self._validate_codes_of_cpp_method(amethod) if not", 
"cpp_object[\"type\"].endswith(\"::\"): # Don't parse variable that implemented outside of #", "cpp_method[\"line_number\"] self._validate_name(avariant, \"variant\") def _validate_name(self, cpp_object, name_re): cpp_object_name = \"\"", "= filename self._validate_cpp_object(cpp_object) # Verify Define Names for define_text in", "0: return an_argument[\"name\"] # If it's a functor?? with \"class", "None: parameter_names = matched.group(2).split(',') for parameter_name in parameter_names: aparameter =", "True try: self._validate_name(amethod, \"define_function\") is_need_reraise = False except SyntaxError: pass", "import argparse import CppHeaderParser import re import sys import yaml", "CppHeaderParser.CppUnion: self._validate_name(cpp_object, \"union\") elif cpp_object_type == CppNamespace: self._validate_name(cpp_object, \"namespace\") elif", "elif cpp_object_type == CppFileName: self._validate_name(cpp_object, \"filename\") def exec_(self): try: with", "= \"class\" class_method_re = \"class_method\" class_method_argument_re = \"class_method_argument\" class_variant_re =", "not None: return True return False return True def _get_argument_name(self,", "def _is_special_method(self, amethod): if isinstance(amethod, six.string_types): amethod_name = amethod else:", "except SyntaxError as e: print(str(e)) return 1 except CppHeaderParser.CppHeaderParser.CppParseError as", "amethod.get(\"constructor\", False)) and (not amethod.get(\"destructor\", False))): try: self._validate_name(amethod, class_method_re) except", "global function while True: # FIXME: Parse special case :", "= matched.group(2) self._validate_name(cpp_object, \"class_method\") elif len(cpp_object[\"returns\"]) > 0: # If", "e: print(str(e)) return 1 except CppHeaderParser.CppHeaderParser.CppParseError as e: # CppHeaderParser", "required=True) parser.add_argument(\"-o\", \"--output\", help=\"Output file path\") parser.add_argument(\"-d\", \"--debug\", action='store_true', help=\"Print", "# Does not have 
valid name, we must not check", "is None: return \"\" else: return matched.group(1) def _get_config(self, name):", "# Verify Struct Names for cpp_object in parsed_info.structs_order: self._validate_cpp_object(cpp_object) #", "self.__config[\"_base_\"] self.__config[\"_base_\"] = { \"re\":\"[a-zA-Z0-9_]+\", \"error\": \"\", } self.__config[\"_base_\"].update(old_base) def", "Define Name self._validate_name(cpp_object, \"define_function\") for aparameter in cpp_object[\"parameters\"]: self._validate_name(aparameter, \"define_function_argument\")", "name_start_pos = re.search(name_re, content).span()[0] parameters_start_pos = content.index('(', name_start_pos) parameters_stop_pos =", "= self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos = re.search(name_re, content).span()[0] parameters_start_pos = content.index('(', name_start_pos)", "if semicolonPos <= i: return; except ValueError: # Not found", "= \"struct_method\" class_method_argument_re = \"struct_method_argument\" class_variant_re = \"struct_variant\" else: class_re", "cpp_object[\"parameters\"]: self._validate_name(aparameter, \"define_function_argument\") elif cpp_object_type == CppHeaderParser.CppClass: if \"struct\" in", "style matched = re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"]) if matched is None: #", "parser.add_argument(\"-o\", \"--output\", help=\"Output file path\") parser.add_argument(\"-d\", \"--debug\", action='store_true', help=\"Print trace", "= [\"class\", \"struct\", \"union\"] if amember[\"type\"] in internal_predeclares: is_skip_validate =", "in parsed_info.namespaces: cpp_object = CppNamespace() cpp_object[\"name\"] = namespace self._validate_cpp_object(cpp_object) #", "> 0: return an_argument[\"name\"] # If it's a functor?? 
with", "\"class\": \"_base_\", \"function\": \"_base_\", \"variant\": \"_base_\", \"namespace\": \"_base_\", \"define\": \"_base_\",", "skip all other searching if (c == \"*\") and (content[i", "\"struct\", \"union\"] if amember[\"type\"] in internal_predeclares: is_skip_validate = True if", "= CppHeaderParser.CppHeader(self.__args.file_path) # Verify File Names filename = os.path.basename(self.__args.file_path) cpp_object", "= parameters return result def _is_special_method(self, amethod): if isinstance(amethod, six.string_types):", "cpp_object: if re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"]) is not None: break self._validate_codes_of_cpp_method(cpp_object) if", "have valid name, we must not check it . return", "other searching if (c == \"*\") and (content[i + 1]", "self._get_argument_name(aparameter) self._validate_name(an_object, class_method_argument_re) else: self._validate_name( {\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]}, \"typedef\") for access_specifier", "= cpp_object cpp_object = dict() cpp_object[\"name\"] = cpp_object_name cpp_object[\"line_number\"] =", "and (\")\" in aparameter[\"name\"])): an_object[\"name\"] = re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1) try: self._validate_name(an_object,", "pass skipped_lines = cpp_method[\"line_number\"] + content.count(\"\\n\", 0, i) - 2", "if (aparameter[\"type\"].endswith(\"::*\") and (\")\" in aparameter[\"name\"])): an_object[\"name\"] = re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1)", "cpp_object_type = type(cpp_object) if cpp_object_type == CppDefine: if len(cpp_object[\"parameters\"]) <=", "amember in cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember) for amember in cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember) elif", "is None: # with normal \"function\" style matched = re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\",", "-1 elif \"name\" in cpp_object: cpp_object_name = cpp_object[\"name\"] if 
('<'", "self._validate_name(cpp_object, \"namespace\") elif cpp_object_type == CppFileName: self._validate_name(cpp_object, \"filename\") def exec_(self):", "_validate_cpp_object(self, cpp_object): cpp_object_type = type(cpp_object) if cpp_object_type == CppDefine: if", "is_need_reraise = False except SyntaxError: pass if is_need_reraise: raise else:", "self._validate_cpp_object(amember) elif cpp_object_type == CppHeaderParser.CppStruct: self._validate_name(cpp_object, \"struct\") elif cpp_object_type ==", "for cpp_object in parsed_info.structs_order: self._validate_cpp_object(cpp_object) # Verify Enum Names for", "matched = re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"]) if matched is None: return \"\"", "= \"class_method_argument\" class_variant_re = \"class_variant\" self._validate_name(cpp_object, class_re) for amethod in", "= content[i] if is_finding_block_comment: # If finding block comment, then", "\"function\" style matched = re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"]) if matched is None:", "dict() if name in override_table: base_name = override_table[name] my_config.update(self._get_config(base_name)) if", "if (\"type\" in amember) and (amember[\"type\"] is not None): internal_predeclares", "in cpp_object['type']: self._validate_cpp_object(cpp_object) for namespace in parsed_info.namespaces: cpp_object = CppNamespace()", "in amember: amember[\"line_number\"] = line_number self._validate_name(amember, \"enum_value\") elif cpp_object_type ==", "Names for cpp_object in parsed_info.typedefs: self._validate_cpp_object(cpp_object) except SyntaxError as e:", "cpp_object_name = cpp_object[\"name\"] if ('<' in cpp_object_name) and (\"debug\" in", "self._validate_cpp_object(cpp_object) # Verify Class Names for cpp_object in parsed_info.classes_order: self._validate_cpp_object(cpp_object)", "if matched is not None: cpp_object_name = matched.group(1) else: return", "#!/usr/bin/env python import argparse import 
CppHeaderParser import re import sys", "Verify Typdef Names for cpp_object in parsed_info.typedefs: self._validate_cpp_object(cpp_object) except SyntaxError", "2 stack.append(i) i += 1 first_i = i last_i =", "(aparameter[\"type\"].endswith(\"::*\") and (\")\" in aparameter[\"name\"])): an_object[\"name\"] = re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1) try:", "parameters_stop_pos + 1) if semicolonPos <= i: return; except ValueError:", "matched.group(1) def _get_config(self, name): override_table = { \"class\": \"_base_\", \"function\":", "type(cpp_object) if cpp_object_type == CppDefine: if len(cpp_object[\"parameters\"]) <= 0: #", "if '...' in cpp_object_name: # Does not have valid name,", "an_argument if len(an_argument[\"name\"]) > 0: return an_argument[\"name\"] # If it's", "# Avoid checking member variable inside function body. if '{'", "import traceback class CppDefine(dict): def __init__(self): self[\"name\"] = None self[\"parameters\"]", "Names for cpp_object in parsed_info.classes_order: self._validate_cpp_object(cpp_object) # Verify Struct Names", "<T ( & ) [ N ]> {\" if \"debug\"", "CppDefine: if len(cpp_object[\"parameters\"]) <= 0: # Normal Define Name self._validate_name(cpp_object,", "\"define\", \"union\": \"struct\", } my_config = dict() if name in", "matched with rule : %s! %s\" % ( filename, cpp_object[\"line_number\"],", "# is the CppHeaderParser's problem. print(str(e)) return 0 return 0", "parser.parse_args() # If user does not specific output path, we", "to input file # path if self.__args.output is None: self.__args.output", "name, we must not check it . 
return if len(cpp_object_name)", "matched = re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"]) if matched is None: self._validate_codes_of_cpp_method(amethod) if", "description='''A styler just target to naming conventions of source code'''", "in name[len(prefix):]: chars.append(\"\\\\s*\") if achar.isalnum(): chars.append(achar) else: chars.append(\"\\\\\") chars.append(achar) return", "= content.index(')', parameters_start_pos) stack = [] try: i = content.index('{',", "for access_specifier in CppHeaderParser.supportedAccessSpecifier: for amember in cpp_object[\"properties\"][access_specifier]: is_skip_validate =", "# Special config use to define filename rule \"argument\": \"variant\",", "def exec_(self): try: with open(self.__args.file_path, \"r\") as source_file: # For", "aname in founded: avariant = dict() avariant[\"name\"] = aname avariant[\"line_number\"]", "\"line_number\" not in amember: amember[\"line_number\"] = line_number self._validate_name(amember, \"enum_value\") elif", "searching if c == \"\\n\": is_finding_single_comment = False elif (c", "name): override_table = { \"class\": \"_base_\", \"function\": \"_base_\", \"variant\": \"_base_\",", "\"line_number\" in cpp_object: line_number = cpp_object[\"line_number\"] for amember in cpp_object[\"values\"]:", "amethod else: amethod_name = amethod[\"name\"] founded = re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name) if", "user does not specific output path, we default it to", "if len(cpp_object[\"parameters\"]) <= 0: # Normal Define Name self._validate_name(cpp_object, \"define\")", "if __name__ == \"__main__\": # Execute only if run as", "None: # with normal \"function\" style matched = re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"])", "else: chars.append(\"\\\\\") chars.append(achar) return \"operator%s\" % ''.join(chars) def _validate_codes_of_cpp_method(self, cpp_method):", "True def _get_cpp_method_re(self, name): prefix = 
\"operator\" if not name.startswith(prefix):", "= self._source_lines[start_line_index:] content = '\\n'.join(rest_lines) code_lines = [] name_re =", "0: # Normal Define Name self._validate_name(cpp_object, \"define\") else: # Function", "SyntaxError: pass if is_need_reraise: raise else: an_object[\"name\"] = self._get_argument_name(aparameter) self._validate_name(an_object,", "= re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"]) if matched is None: return \"\" else:", "= -1 elif \"name\" in cpp_object: cpp_object_name = cpp_object[\"name\"] if", "for amember in cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember) elif cpp_object_type == CppHeaderParser.CppStruct: self._validate_name(cpp_object,", "parent line number if enum value does not have it's", "else: # Function Liked Define Name self._validate_name(cpp_object, \"define_function\") for aparameter", "special case : \"struct RArraySize <T ( & ) [", "if cpp_object[\"type\"] != \"return\": if cpp_object[\"static\"]: self._validate_name(cpp_object, \"static_variant\") elif cpp_object[\"type\"]", "# If finding single comment, then skip all other searching", "comment, then skip all other searching if (c == \"*\")", "= False if (\"type\" in amember) and (amember[\"type\"] is not", "Application(object): def __init__(self): description='''A styler just target to naming conventions", "i del stack[len(stack) - 1] i += 1 if len(stack)", "result[\"name\"] = name result[\"parameters\"] = parameters return result def _is_special_method(self,", "self.__args.debug: traceback.print_stack() raise SyntaxError(\"%s:%s:error: Name '%s' isn't matched with rule", "Verify Function Names for cpp_object in parsed_info.functions: self._validate_cpp_object(cpp_object) # Verify", "\"struct\", } my_config = dict() if name in override_table: base_name", "rule \"argument\": \"variant\", \"static_variant\": \"variant\", \"global_variant\": \"variant\", \"function_argument\": \"argument\", 
\"class_method_argument\":", "except SyntaxError: pass if is_need_reraise: raise for aparameter in amethod[\"parameters\"]:", "semicolonPos <= i: return; except ValueError: # Not found a", "# FIXME: Parse special case : \"struct RArraySize <T (", "name_re, error_message)) def _get_class_realname(self, class_name): return re.match(r\"(\\w+).*\", class_name).group(1) def _validate_cpp_object(self,", "<= 0: content = content[first_i:last_i] founded = re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content) for", "if matched is None: filename = os.path.basename(self.__args.file_path) error_message = self._get_config(name_re)[\"error\"]", "# Operator methods chars = [] for achar in name[len(prefix):]:", "amethod[\"name\"] founded = re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name) if len(founded) <= 0: if", "macro invokes. # FIXME: We just ignored this situation: #", "in cpp_object_name) and (\"debug\" in cpp_object): matched = re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"])", "<= 0): if \">\" in cpp_object[\"name\"]: regex = r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched", "SyntaxError(\"%s:%s:error: Name '%s' isn't matched with rule : %s! %s\"", "'...' in cpp_object_name: # Does not have valid name, we", "if self._is_special_method(cpp_object): break if (cpp_object[\"class\"] is None) or (len(cpp_object[\"class\"]) <=", "\"typedef\") for access_specifier in CppHeaderParser.supportedAccessSpecifier: for amember in cpp_object[\"properties\"][access_specifier]: is_skip_validate", "re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"]) if matched is not None: cpp_object_name = matched.group(1)", "matched is None: filename = os.path.basename(self.__args.file_path) error_message = self._get_config(name_re)[\"error\"] if", "= namespace self._validate_cpp_object(cpp_object) # Verify Typdef Names for cpp_object in", "> 1: cpp_object_name = splitted[-1] if '...' 
in cpp_object_name: #", "matched.group(1) cpp_object[\"name\"] = matched.group(2) self._validate_name(cpp_object, \"class_method\") elif len(cpp_object[\"returns\"]) > 0:", "# number if \"line_number\" not in amember: amember[\"line_number\"] = line_number", "if self.__args.debug: traceback.print_stack() raise SyntaxError(\"%s:%s:error: Name '%s' isn't matched with", "stack\") parser.add_argument(\"file_path\", help=\"Source file path\") self.__args = parser.parse_args() # If", "is not None: cpp_object[\"class\"] = matched.group(1) cpp_object[\"name\"] = matched.group(2) self._validate_name(cpp_object,", "class_name): return re.match(r\"(\\w+).*\", class_name).group(1) def _validate_cpp_object(self, cpp_object): cpp_object_type = type(cpp_object)", "aparameter[\"name\"]).group(1) try: self._validate_name(an_object, class_method_re) except SyntaxError: is_need_reraise = True try:", "as source_file: # For later parse by _validate_codes_of_cpp_method() self._source_lines =", "\"static_variant\") elif cpp_object[\"type\"] not in [\"class\", \"struct\", \"union\"]: if not", "parameters_stop_pos = content.index(')', parameters_start_pos) stack = [] try: i =", "and (not amethod.get(\"destructor\", False))): try: self._validate_name(amethod, class_method_re) except SyntaxError: is_need_reraise", "= self._get_argument_name(aparameter) self._validate_name(an_object, class_method_argument_re) else: self._validate_name( {\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]}, \"typedef\") for", "we default it to input file # path if self.__args.output", "cpp_object[\"values\"]: # Use parent line number if enum value does", "my_config = dict() if name in override_table: base_name = override_table[name]", "namespace in parsed_info.namespaces: cpp_object = CppNamespace() cpp_object[\"name\"] = namespace self._validate_cpp_object(cpp_object)", "cpp_object[\"name\"] = filename self._validate_cpp_object(cpp_object) # Verify Define Names for define_text", "return False 
if len(cpp_variable[\"type\"]) <= 0: return False return True", "main(): a = Application() sys.exit(a.exec_()) if __name__ == \"__main__\": #", "pass if is_need_reraise: raise else: an_object[\"name\"] = self._get_argument_name(aparameter) self._validate_name(an_object, class_method_argument_re)", "help=\"Source file path\") self.__args = parser.parse_args() # If user does", "not name.startswith(prefix): return re.escape(name) # Operator methods chars = []", "a semicolon, just ignored. pass skipped_lines = cpp_method[\"line_number\"] + content.count(\"\\n\",", "for define_text in parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text)) # Verify Function Names for", "parsed_info.variables: # Avoid checking member variable inside function body. if", "an_object[\"name\"] = re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1) try: self._validate_name(an_object, class_method_re) except SyntaxError: is_need_reraise", "for rule_name in name_re.split(\"_\")]), error_message) if self.__args.debug: traceback.print_stack() raise SyntaxError(\"%s:%s:error:", "then skip all other searching if c == \"\\n\": is_finding_single_comment", "> 0: # If a function does not have return", "cpp_object[\"name\"]: # Constructor / Destructor will the same with class", "re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"]) if matched is None: return \"\" else: return", "for achar in name[len(prefix):]: chars.append(\"\\\\s*\") if achar.isalnum(): chars.append(achar) else: chars.append(\"\\\\\")", "is_skip_validate: if amember[\"static\"]: self._validate_name(amember, \"static_variant\") else: self._validate_name(amember, class_variant_re) for amember", "self._validate_name(cpp_object, \"class_method\") break elif cpp_object_type == CppHeaderParser.CppUnion: self._validate_name(cpp_object, \"union\") elif", "elif cpp_object_type == CppHeaderParser.CppUnion: self._validate_name(cpp_object, \"union\") elif cpp_object_type == CppNamespace:", "If a 
function does not have return value(at least #", "re.match(self._get_config(name_re)[\"re\"], cpp_object_name) if matched is None: filename = os.path.basename(self.__args.file_path) error_message", "ValueError: # Not found a semicolon, just ignored. pass skipped_lines", "Verify File Names filename = os.path.basename(self.__args.file_path) cpp_object = CppFileName() cpp_object[\"name\"]", "implemented outside of # template class. It's already be parsed", "\"\" else: return matched.group(1) def _get_config(self, name): override_table = {", "+ content.count(\"\\n\", 0, i) - 2 stack.append(i) i += 1", "class_variant_re = \"class_variant\" self._validate_name(cpp_object, class_re) for amethod in cpp_object.get_all_methods(): matched", "CppHeaderParser's problem. print(str(e)) return 0 return 0 def main(): a", "if \"line_number\" not in amember: amember[\"line_number\"] = line_number self._validate_name(amember, \"enum_value\")", "<= 0: # Normal Define Name self._validate_name(cpp_object, \"define\") else: #", "= \"struct_method_argument\" class_variant_re = \"struct_variant\" else: class_re = \"class\" class_method_re", "= None self[\"parameters\"] = [] self[\"line_number\"] = -1 class CppDefineParameter(dict):", "-1 class CppNamespace(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] =", "already be parsed when parsing # the class. 
self._validate_name(cpp_object, \"global_variant\")", "-1 class CppFileName(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] =", "\"operator%s\" % ''.join(chars) def _validate_codes_of_cpp_method(self, cpp_method): start_line_index = cpp_method[\"line_number\"] -", "= content.index('(', name_start_pos) parameters_stop_pos = content.index(')', parameters_start_pos) stack = []", "block comment, then skip all other searching if (c ==", "cpp_object[\"debug\"]) is not None: break self._validate_codes_of_cpp_method(cpp_object) if cpp_object[\"name\"] == \"main\":", "== \"/\") and (content[i + 1] == \"/\"): is_finding_single_comment =", "self._validate_name( {\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]}, \"typedef\") for access_specifier in CppHeaderParser.supportedAccessSpecifier: for amember", "yaml.load(open(self.__args.config)) old_base = self.__config[\"_base_\"] self.__config[\"_base_\"] = { \"re\":\"[a-zA-Z0-9_]+\", \"error\": \"\",", "check it . return matched = re.match(self._get_config(name_re)[\"re\"], cpp_object_name) if matched", "file, but we should pass it, this # is the", "= [] try: i = content.index('{', parameters_stop_pos + 1) except", "( & ) [ N ]> {\" if \"debug\" in", "0: error_message = \"%s %s\" % ( ' '.join([rule_name.capitalize() for", "name, we must not check it . 
return matched =", "try: self._validate_name(amethod, class_method_re) except SyntaxError: is_need_reraise = True try: self._validate_name(amethod,", "def _validate_name(self, cpp_object, name_re): cpp_object_name = \"\" if isinstance(cpp_object, six.string_types):", "len(founded) <= 0: if re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"]) is not None: return", "override_table = { \"class\": \"_base_\", \"function\": \"_base_\", \"variant\": \"_base_\", \"namespace\":", "len(content)): c = content[i] if is_finding_block_comment: # If finding block", "class_re) for amethod in cpp_object.get_all_methods(): matched = re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"]) if", "amember: amember[\"line_number\"] = line_number self._validate_name(amember, \"enum_value\") elif cpp_object_type == CppHeaderParser.CppVariable:", "# Extract cpp method codes rest_lines = self._source_lines[start_line_index:] content =", "CppFileName(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class", "a function does not have return value(at least # \"void\"),", "self.__config: my_config.update(self.__config[name]) return my_config def _is_valid_variable(self, cpp_variable): if cpp_variable[\"type\"] ==", "amember[\"static\"]: self._validate_name(amember, \"static_variant\") else: self._validate_name(amember, class_variant_re) for amember in cpp_object[\"structs\"][access_specifier]:", "\"define_function_argument\") elif cpp_object_type == CppHeaderParser.CppClass: if \"struct\" in cpp_object[\"declaration_method\"]: class_re", "have it's line # number if \"line_number\" not in amember:", "name): prefix = \"operator\" if not name.startswith(prefix): return re.escape(name) #", "cpp_object[\"name\"] = namespace self._validate_cpp_object(cpp_object) # Verify Typdef Names for cpp_object", "an_argument[\"name\"] # If it's a functor?? with \"class name::function\" style", "must not check it . 
return if len(cpp_object_name) <= 0:", "(c == \"/\") and (content[i + 1] == \"/\"): is_finding_single_comment", "with rule : %s! %s\" % ( filename, cpp_object[\"line_number\"], cpp_object_name,", "name = matched.group(1) parameters = [] if matched.group(2) is not", "self._validate_name(an_object, class_method_re) except SyntaxError: is_need_reraise = True try: self._validate_name(amethod, \"define_function\")", "# If it's a functor?? with \"class name::function\" style matched", "def __init__(self): self[\"name\"] = None self[\"parameters\"] = [] self[\"line_number\"] =", "\"function_argument\": \"argument\", \"class_method_argument\": \"function_argument\", \"struct_method_argument\": \"class_method_argument\", \"define_function_argument\": \"function_argument\", \"define_function\": \"function\",", "0 is_finding_block_comment = False is_finding_single_comment = False while (len(stack) >", "= i last_i = 0 is_finding_block_comment = False is_finding_single_comment =", "return an_argument[\"name\"] # If it's a functor?? with \"class name::function\"", "in cpp_object: if re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"]) is not None: break self._validate_codes_of_cpp_method(cpp_object)", "# with normal \"function\" style matched = re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"]) if", "= content[first_i:last_i] founded = re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content) for aname in founded:", "self._validate_name(an_object, class_method_argument_re) else: self._validate_name( {\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]}, \"typedef\") for access_specifier in", "not is_skip_validate: if amember[\"static\"]: self._validate_name(amember, \"static_variant\") else: self._validate_name(amember, class_variant_re) for", "it . 
return if len(cpp_object_name) <= 0: # Does not", "= -1 if \"line_number\" in cpp_object: line_number = cpp_object[\"line_number\"] for", "Verify Struct Names for cpp_object in parsed_info.structs_order: self._validate_cpp_object(cpp_object) # Verify", "= re.search(regex, cpp_object[\"debug\"]) if matched.group(1) is not None: cpp_object[\"class\"] =", "help=\"Output file path\") parser.add_argument(\"-d\", \"--debug\", action='store_true', help=\"Print trace stack\") parser.add_argument(\"file_path\",", "for aparameter in amethod[\"parameters\"]: an_object = dict() an_object[\"line_number\"] = aparameter[\"line_number\"]", "in aparameter[\"name\"])): an_object[\"name\"] = re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1) try: self._validate_name(an_object, class_method_re) except", "= re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name) if len(founded) <= 0: if re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"])", "self._validate_cpp_object(cpp_object) for namespace in parsed_info.namespaces: cpp_object = CppNamespace() cpp_object[\"name\"] =", "and (content[i + 1] == \"/\"): is_finding_single_comment = True elif", "== \"main\": break if self._is_special_method(cpp_object): break if (cpp_object[\"class\"] is None)", "class_method_argument_re = \"class_method_argument\" class_variant_re = \"class_variant\" self._validate_name(cpp_object, class_re) for amethod", "template class. 
It's already be parsed when parsing # the", "Exclude \"main\" function while parsing global function while True: #", "if cpp_object[\"name\"] == \"main\": break if self._is_special_method(cpp_object): break if (cpp_object[\"class\"]", "len(stack) <= 0: content = content[first_i:last_i] founded = re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content)", "\"struct RArraySize <T ( & ) [ N ]> {\"", "help=\"Print trace stack\") parser.add_argument(\"file_path\", help=\"Source file path\") self.__args = parser.parse_args()", "if matched.group(2) is not None: parameter_names = matched.group(2).split(',') for parameter_name", "parameters return result def _is_special_method(self, amethod): if isinstance(amethod, six.string_types): amethod_name", "# Don't parse variable that implemented outside of # template", "return value(at least # \"void\"), it maybe macro invokes. #", "if len(founded) <= 0: if re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"]) is not None:", "cpp_object[\"name\"] == \"main\": break if self._is_special_method(cpp_object): break if (cpp_object[\"class\"] is", "and (content[i + 1] == \"*\"): is_finding_block_comment = True elif", "class CppDefine(dict): def __init__(self): self[\"name\"] = None self[\"parameters\"] = []", "parse variable that implemented outside of # template class. 
It's", "= dict() an_object[\"line_number\"] = aparameter[\"line_number\"] if (aparameter[\"type\"].endswith(\"::*\") and (\")\" in", "# Function Liked Define Name self._validate_name(cpp_object, \"define_function\") for aparameter in", "in cpp_object): matched = re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"]) if matched is not", "cpp_object_name.split() if len(splitted) > 1: cpp_object_name = splitted[-1] if '...'", "and (i < len(content)): c = content[i] if is_finding_block_comment: #", "specific output path, we default it to input file #", "\"struct_method\": \"class_method\", \"class_variant\": \"variant\", \"struct_variant\": \"class_variant\", \"typedef\": \"class\", \"struct\": \"class\",", "matched = re.search(regex, cpp_object[\"debug\"]) if matched.group(1) is not None: cpp_object[\"class\"]", "class_variant_re = \"struct_variant\" else: class_re = \"class\" class_method_re = \"class_method\"", "== CppHeaderParser.CppStruct: self._validate_name(cpp_object, \"struct\") elif cpp_object_type == CppHeaderParser.CppEnum: self._validate_name(cpp_object, \"enum\")", "as e: # CppHeaderParser can't parse this file, but we", "is_finding_block_comment: # If finding block comment, then skip all other", "\"struct_method_argument\": \"class_method_argument\", \"define_function_argument\": \"function_argument\", \"define_function\": \"function\", \"class_method\": \"function\", \"struct_method\": \"class_method\",", "= { \"re\":\"[a-zA-Z0-9_]+\", \"error\": \"\", } self.__config[\"_base_\"].update(old_base) def parse_define(self, adefine):", "None: self._validate_codes_of_cpp_method(amethod) if not self._is_special_method(amethod): if ((amethod[\"name\"] != self._get_class_realname(cpp_object[\"name\"])) and", "for amember in cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember) for amember in cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember)", "self._validate_name(amember, class_variant_re) for amember in 
cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember) for amember in", "\"class_method\") break elif cpp_object_type == CppHeaderParser.CppUnion: self._validate_name(cpp_object, \"union\") elif cpp_object_type", "= os.path.basename(self.__args.file_path) error_message = self._get_config(name_re)[\"error\"] if len(error_message) > 0: error_message", "'{' not in cpp_object['type']: self._validate_cpp_object(cpp_object) for namespace in parsed_info.namespaces: cpp_object", "0) and (i < len(content)): c = content[i] if is_finding_block_comment:", "last_i = i del stack[len(stack) - 1] i += 1", "not in cpp_object['type']: self._validate_cpp_object(cpp_object) for namespace in parsed_info.namespaces: cpp_object =", "try: with open(self.__args.file_path, \"r\") as source_file: # For later parse", "%s\" % ( filename, cpp_object[\"line_number\"], cpp_object_name, name_re, error_message)) def _get_class_realname(self,", "self._validate_cpp_object(cpp_object) # Verify Variable Names for cpp_object in parsed_info.variables: #", "\"class name::function\" style matched = re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"]) if matched is", "= False while (len(stack) > 0) and (i < len(content)):", "while True: # FIXME: Parse special case : \"struct RArraySize", "path if self.__args.output is None: self.__args.output = self.__args.file_path self.__config =", "\"class\", \"enum\": \"class\", \"enum_value\": \"define\", \"union\": \"struct\", } my_config =", "in self.__config: my_config.update(self.__config[name]) return my_config def _is_valid_variable(self, cpp_variable): if cpp_variable[\"type\"]", "cpp_object_name: # Does not have valid name, we must not", "in cpp_object[\"name\"]: regex = r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched = re.search(regex, cpp_object[\"debug\"]) if", "return matched.group(1) def _get_config(self, name): override_table = { \"class\": \"_base_\",", "== \"return\": return False if len(cpp_variable[\"type\"]) 
<= 0: return False", "_is_valid_variable(self, cpp_variable): if cpp_variable[\"type\"] == \"return\": return False if len(cpp_variable[\"type\"])", "== CppDefine: if len(cpp_object[\"parameters\"]) <= 0: # Normal Define Name", "= False elif (c == \"/\") and (content[i + 1]", "CppHeaderParser.CppClass: if \"struct\" in cpp_object[\"declaration_method\"]: class_re = \"struct\" class_method_re =", "parsed_info = CppHeaderParser.CppHeader(self.__args.file_path) # Verify File Names filename = os.path.basename(self.__args.file_path)", "should pass it, this # is the CppHeaderParser's problem. print(str(e))", "((amethod[\"name\"] != self._get_class_realname(cpp_object[\"name\"])) and (not amethod.get(\"constructor\", False)) and (not amethod.get(\"destructor\",", "aparameter[\"name\"] = parameter_name.strip() parameters.append(aparameter) result = CppDefine() result[\"name\"] = name", "same with class name break self._validate_name(cpp_object, \"class_method\") break elif cpp_object_type", "= re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1) try: self._validate_name(an_object, class_method_re) except SyntaxError: is_need_reraise =", "cpp_object_type == CppHeaderParser.CppEnum: self._validate_name(cpp_object, \"enum\") line_number = -1 if \"line_number\"", "does not have it's line # number if \"line_number\" not", "function does not have return value(at least # \"void\"), it", "have return value(at least # \"void\"), it maybe macro invokes.", "if len(cpp_object_name) <= 0: # Does not have valid name,", "len(cpp_object_name) <= 0: # Does not have valid name, we", "return True def _get_argument_name(self, an_argument): if isinstance(an_argument, six.string_types): return an_argument", "aparameter in amethod[\"parameters\"]: an_object = dict() an_object[\"line_number\"] = aparameter[\"line_number\"] if", "\"struct_variant\": \"class_variant\", \"typedef\": \"class\", \"struct\": \"class\", \"enum\": \"class\", \"enum_value\": \"define\",", "= False except SyntaxError: 
pass if is_need_reraise: raise for aparameter", "= dict() cpp_object[\"name\"] = cpp_object_name cpp_object[\"line_number\"] = -1 elif \"name\"", "function body. if '{' not in cpp_object['type']: self._validate_cpp_object(cpp_object) for namespace", "cpp_object in parsed_info.typedefs: self._validate_cpp_object(cpp_object) except SyntaxError as e: print(str(e)) return", "and (content[i + 1] == \"/\"): is_finding_block_comment = False elif", "aparameter in cpp_object[\"parameters\"]: self._validate_name(aparameter, \"define_function_argument\") elif cpp_object_type == CppHeaderParser.CppClass: if", "} my_config = dict() if name in override_table: base_name =", "False if (\"type\" in amember) and (amember[\"type\"] is not None):", "]> {\" if \"debug\" in cpp_object: if re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"]) is", "os.path.basename(self.__args.file_path) cpp_object = CppFileName() cpp_object[\"name\"] = filename self._validate_cpp_object(cpp_object) # Verify", "{\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]}, \"typedef\") for access_specifier in CppHeaderParser.supportedAccessSpecifier: for amember in", "\"variant\", \"struct_variant\": \"class_variant\", \"typedef\": \"class\", \"struct\": \"class\", \"enum\": \"class\", \"enum_value\":", "return \"\" else: return matched.group(1) def _get_config(self, name): override_table =", "re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"]) if matched is None: self._validate_codes_of_cpp_method(amethod) if not self._is_special_method(amethod):", "CppHeaderParser.CppEnum: self._validate_name(cpp_object, \"enum\") line_number = -1 if \"line_number\" in cpp_object:", "cpp_object_type == CppDefine: if len(cpp_object[\"parameters\"]) <= 0: # Normal Define", "else: self._validate_name( {\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]}, \"typedef\") for access_specifier in CppHeaderParser.supportedAccessSpecifier: for", "source_file.readlines() 
parsed_info = CppHeaderParser.CppHeader(self.__args.file_path) # Verify File Names filename =", "[] try: i = content.index('{', parameters_stop_pos + 1) except ValueError:", "case : \"struct RArraySize <T ( & ) [ N", "else: an_object[\"name\"] = self._get_argument_name(aparameter) self._validate_name(an_object, class_method_argument_re) else: self._validate_name( {\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]},", "for namespace in parsed_info.namespaces: cpp_object = CppNamespace() cpp_object[\"name\"] = namespace", "value does not have it's line # number if \"line_number\"", "matched.group(1) else: return # Parse union like names splitted =", "Verify Variable Names for cpp_object in parsed_info.variables: # Avoid checking", "if matched is None: return \"\" else: return matched.group(1) def", "parameters.append(aparameter) result = CppDefine() result[\"name\"] = name result[\"parameters\"] = parameters", "RSignal<void(int)> sReceived; if \"<\" not in cpp_object[\"name\"]: self._validate_name(cpp_object, \"function\") break", "return 0 def main(): a = Application() sys.exit(a.exec_()) if __name__", "my_config.update(self.__config[name]) return my_config def _is_valid_variable(self, cpp_variable): if cpp_variable[\"type\"] == \"return\":", "cpp_object_type == CppFileName: self._validate_name(cpp_object, \"filename\") def exec_(self): try: with open(self.__args.file_path,", "six.string_types): amethod_name = amethod else: amethod_name = amethod[\"name\"] founded =", "cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember) for amember in cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember) elif cpp_object_type ==", "RArraySize <T ( & ) [ N ]> {\" if", "codes rest_lines = self._source_lines[start_line_index:] content = '\\n'.join(rest_lines) code_lines = []", "Destructor will the same with class name break self._validate_name(cpp_object, \"class_method\")", "cpp_object_type == CppHeaderParser.CppMethod: 
# Exclude \"main\" function while parsing global", "<= 0: return False return True def _get_cpp_method_re(self, name): prefix", "== \"\\n\": is_finding_single_comment = False elif (c == \"/\") and", "amember[\"type\"] in internal_predeclares: is_skip_validate = True if not is_skip_validate: if", "not in cpp_object[\"name\"]: self._validate_name(cpp_object, \"function\") break if self._get_class_realname(cpp_object[\"class\"]) == cpp_object[\"name\"]:", "self.__config = yaml.load(open(self.__args.config)) old_base = self.__config[\"_base_\"] self.__config[\"_base_\"] = { \"re\":\"[a-zA-Z0-9_]+\",", "\"function_argument\", \"struct_method_argument\": \"class_method_argument\", \"define_function_argument\": \"function_argument\", \"define_function\": \"function\", \"class_method\": \"function\", \"struct_method\":", "is_finding_single_comment: # If finding single comment, then skip all other", "not check it . return matched = re.match(self._get_config(name_re)[\"re\"], cpp_object_name) if", "If user does not specific output path, we default it", "self.__args.output is None: self.__args.output = self.__args.file_path self.__config = yaml.load(open(self.__args.config)) old_base", "just target to naming conventions of source code''' parser =", "\"re\":\"[a-zA-Z0-9_]+\", \"error\": \"\", } self.__config[\"_base_\"].update(old_base) def parse_define(self, adefine): matched =", "elif is_finding_single_comment: # If finding single comment, then skip all", "and (\"debug\" in cpp_object): matched = re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"]) if matched", "False))): try: self._validate_name(amethod, class_method_re) except SyntaxError: is_need_reraise = True try:", "[] for achar in name[len(prefix):]: chars.append(\"\\\\s*\") if achar.isalnum(): chars.append(achar) else:", "traceback.print_stack() raise SyntaxError(\"%s:%s:error: Name '%s' isn't matched with rule :", "= splitted[-1] if '...' 
in cpp_object_name: # Does not have", "if self.__args.output is None: self.__args.output = self.__args.file_path self.__config = yaml.load(open(self.__args.config))", "path\") self.__args = parser.parse_args() # If user does not specific", "action='store_true', help=\"Print trace stack\") parser.add_argument(\"file_path\", help=\"Source file path\") self.__args =", "pass it, this # is the CppHeaderParser's problem. print(str(e)) return", "stack = [] try: i = content.index('{', parameters_stop_pos + 1)", "None: break self._validate_codes_of_cpp_method(cpp_object) if cpp_object[\"name\"] == \"main\": break if self._is_special_method(cpp_object):", "self._get_class_realname(cpp_object[\"name\"])) and (not amethod.get(\"constructor\", False)) and (not amethod.get(\"destructor\", False))): try:", "__init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class CppNamespace(dict): def", "= -1 class CppDefineParameter(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"]", "name_re = self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos = re.search(name_re, content).span()[0] parameters_start_pos = content.index('(',", "def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class CppFileName(dict):", "Not found a semicolon, just ignored. 
pass skipped_lines = cpp_method[\"line_number\"]", "return re.match(r\"(\\w+).*\", class_name).group(1) def _validate_cpp_object(self, cpp_object): cpp_object_type = type(cpp_object) if", "# Verify Variable Names for cpp_object in parsed_info.variables: # Avoid", "number if \"line_number\" not in amember: amember[\"line_number\"] = line_number self._validate_name(amember,", "cpp_object_name = matched.group(1) else: return # Parse union like names", "- 2 stack.append(i) i += 1 first_i = i last_i", "open(self.__args.file_path, \"r\") as source_file: # For later parse by _validate_codes_of_cpp_method()", "\"class\", \"struct\": \"class\", \"enum\": \"class\", \"enum_value\": \"define\", \"union\": \"struct\", }", "\"return\": if cpp_object[\"static\"]: self._validate_name(cpp_object, \"static_variant\") elif cpp_object[\"type\"] not in [\"class\",", "> 0: error_message = \"%s %s\" % ( ' '.join([rule_name.capitalize()", "self._validate_name(cpp_object, \"static_variant\") elif cpp_object[\"type\"] not in [\"class\", \"struct\", \"union\"]: if", "False return True def _get_argument_name(self, an_argument): if isinstance(an_argument, six.string_types): return", "not None): internal_predeclares = [\"class\", \"struct\", \"union\"] if amember[\"type\"] in", "def _validate_cpp_object(self, cpp_object): cpp_object_type = type(cpp_object) if cpp_object_type == CppDefine:", "Special config use to define filename rule \"argument\": \"variant\", \"static_variant\":", "} self.__config[\"_base_\"].update(old_base) def parse_define(self, adefine): matched = re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine) name", "matched is None: self._validate_codes_of_cpp_method(amethod) if not self._is_special_method(amethod): if ((amethod[\"name\"] !=", "line_number = cpp_object[\"line_number\"] for amember in cpp_object[\"values\"]: # Use parent", "just ignored. 
pass skipped_lines = cpp_method[\"line_number\"] + content.count(\"\\n\", 0, i)", "\"define_function\") for aparameter in cpp_object[\"parameters\"]: self._validate_name(aparameter, \"define_function_argument\") elif cpp_object_type ==", "normal \"function\" style matched = re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"]) if matched is", "self._get_class_realname(cpp_object[\"class\"]) == cpp_object[\"name\"]: # Constructor / Destructor will the same", "0 def main(): a = Application() sys.exit(a.exec_()) if __name__ ==", "name break self._validate_name(cpp_object, \"class_method\") break elif cpp_object_type == CppHeaderParser.CppUnion: self._validate_name(cpp_object,", "then skip all other searching if (c == \"*\") and", "# If finding block comment, then skip all other searching", "# Exclude \"main\" function while parsing global function while True:", "0: content = content[first_i:last_i] founded = re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content) for aname", "def _is_valid_variable(self, cpp_variable): if cpp_variable[\"type\"] == \"return\": return False if", "cpp_object[\"name\"] = cpp_object_name cpp_object[\"line_number\"] = -1 elif \"name\" in cpp_object:", "True elif c == \"{\": stack.append(i) elif c == \"}\":", "rule_name in name_re.split(\"_\")]), error_message) if self.__args.debug: traceback.print_stack() raise SyntaxError(\"%s:%s:error: Name", "FIXME: Parse special case : \"struct RArraySize <T ( &", "if is_need_reraise: raise else: an_object[\"name\"] = self._get_argument_name(aparameter) self._validate_name(an_object, class_method_argument_re) else:", "matched = re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine) name = matched.group(1) parameters = []", "for aparameter in cpp_object[\"parameters\"]: self._validate_name(aparameter, \"define_function_argument\") elif cpp_object_type == CppHeaderParser.CppClass:", "!= self._get_class_realname(cpp_object[\"name\"])) and (not amethod.get(\"constructor\", False)) 
and (not amethod.get(\"destructor\", False))):", "_validate_codes_of_cpp_method(self, cpp_method): start_line_index = cpp_method[\"line_number\"] - 1 # Extract cpp", "re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"]) if matched is None: # with normal \"function\"", "= None self[\"line_number\"] = -1 class CppFileName(dict): def __init__(self): self[\"name\"]", "parsed_info.functions: self._validate_cpp_object(cpp_object) # Verify Class Names for cpp_object in parsed_info.classes_order:", ": %s! %s\" % ( filename, cpp_object[\"line_number\"], cpp_object_name, name_re, error_message))", "\"function\", \"class_method\": \"function\", \"struct_method\": \"class_method\", \"class_variant\": \"variant\", \"struct_variant\": \"class_variant\", \"typedef\":", "return \"operator%s\" % ''.join(chars) def _validate_codes_of_cpp_method(self, cpp_method): start_line_index = cpp_method[\"line_number\"]", "parameters_start_pos) stack = [] try: i = content.index('{', parameters_stop_pos +", "_validate_codes_of_cpp_method() self._source_lines = source_file.readlines() parsed_info = CppHeaderParser.CppHeader(self.__args.file_path) # Verify File", "= False is_finding_single_comment = False while (len(stack) > 0) and", "else: return matched.group(1) def _get_config(self, name): override_table = { \"class\":", "path (In YAML format)\", required=True) parser.add_argument(\"-o\", \"--output\", help=\"Output file path\")", "chars.append(\"\\\\\") chars.append(achar) return \"operator%s\" % ''.join(chars) def _validate_codes_of_cpp_method(self, cpp_method): start_line_index", "if isinstance(cpp_object, six.string_types): cpp_object_name = cpp_object cpp_object = dict() cpp_object[\"name\"]", "False is_finding_single_comment = False while (len(stack) > 0) and (i", "elif cpp_object_type == CppHeaderParser.CppStruct: self._validate_name(cpp_object, \"struct\") elif cpp_object_type == CppHeaderParser.CppEnum:", "elif (c == \"/\") and (content[i + 1] == \"*\"):", 
"cpp_method): start_line_index = cpp_method[\"line_number\"] - 1 # Extract cpp method", "if cpp_object_type == CppDefine: if len(cpp_object[\"parameters\"]) <= 0: # Normal", "== CppHeaderParser.CppClass: if \"struct\" in cpp_object[\"declaration_method\"]: class_re = \"struct\" class_method_re", "self._validate_name(cpp_object, \"global_variant\") elif cpp_object_type == CppHeaderParser.CppMethod: # Exclude \"main\" function", "content.index(')', parameters_start_pos) stack = [] try: i = content.index('{', parameters_stop_pos", "Variable Names for cpp_object in parsed_info.variables: # Avoid checking member", "= self.__args.file_path self.__config = yaml.load(open(self.__args.config)) old_base = self.__config[\"_base_\"] self.__config[\"_base_\"] =", "and (amember[\"type\"] is not None): internal_predeclares = [\"class\", \"struct\", \"union\"]", "= r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched = re.search(regex, cpp_object[\"debug\"]) if matched.group(1) is not", "path\") parser.add_argument(\"-d\", \"--debug\", action='store_true', help=\"Print trace stack\") parser.add_argument(\"file_path\", help=\"Source file", "invokes. # FIXME: We just ignored this situation: # Code", "except ValueError: return; try: semicolonPos = content.index(';', parameters_stop_pos + 1)", "raise else: an_object[\"name\"] = self._get_argument_name(aparameter) self._validate_name(an_object, class_method_argument_re) else: self._validate_name( {\"name\":matched.group(1),", "= cpp_object[\"name\"] if ('<' in cpp_object_name) and (\"debug\" in cpp_object):", "Name '%s' isn't matched with rule : %s! 
%s\" %", "\"class_variant\" self._validate_name(cpp_object, class_re) for amethod in cpp_object.get_all_methods(): matched = re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\",", "break if self._is_special_method(cpp_object): break if (cpp_object[\"class\"] is None) or (len(cpp_object[\"class\"])", "\"function\": \"_base_\", \"variant\": \"_base_\", \"namespace\": \"_base_\", \"define\": \"_base_\", \"filename\": \"_base_\",", "== cpp_object[\"name\"]: # Constructor / Destructor will the same with", "filename self._validate_cpp_object(cpp_object) # Verify Define Names for define_text in parsed_info.defines:", "self._validate_cpp_object(amember) for amember in cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember) elif cpp_object_type == CppHeaderParser.CppStruct:", "path, we default it to input file # path if", "\"define\": \"_base_\", \"filename\": \"_base_\", # Special config use to define", "result[\"parameters\"] = parameters return result def _is_special_method(self, amethod): if isinstance(amethod,", "is not None: cpp_object_name = matched.group(1) else: return # Parse", "\"_base_\", # Special config use to define filename rule \"argument\":", "\"error\": \"\", } self.__config[\"_base_\"].update(old_base) def parse_define(self, adefine): matched = re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\",", "self._validate_cpp_object(cpp_object) # Verify Enum Names for cpp_object in parsed_info.enums: self._validate_cpp_object(cpp_object)", "= cpp_object_name.split() if len(splitted) > 1: cpp_object_name = splitted[-1] if", "= yaml.load(open(self.__args.config)) old_base = self.__config[\"_base_\"] self.__config[\"_base_\"] = { \"re\":\"[a-zA-Z0-9_]+\", \"error\":", "checking member variable inside function body. if '{' not in", "name in self.__config: my_config.update(self.__config[name]) return my_config def _is_valid_variable(self, cpp_variable): if", "it . 
return matched = re.match(self._get_config(name_re)[\"re\"], cpp_object_name) if matched is", "filename, cpp_object[\"line_number\"], cpp_object_name, name_re, error_message)) def _get_class_realname(self, class_name): return re.match(r\"(\\w+).*\",", "CppHeaderParser can't parse this file, but we should pass it,", "== \"/\"): is_finding_block_comment = False elif (c == \"/\") and", "format)\", required=True) parser.add_argument(\"-o\", \"--output\", help=\"Output file path\") parser.add_argument(\"-d\", \"--debug\", action='store_true',", "\"union\"] if amember[\"type\"] in internal_predeclares: is_skip_validate = True if not", "content.count(\"\\n\", 0, i) - 2 stack.append(i) i += 1 first_i", "does not have return value(at least # \"void\"), it maybe", "= line_number self._validate_name(amember, \"enum_value\") elif cpp_object_type == CppHeaderParser.CppVariable: if cpp_object[\"type\"]", "elif cpp_object_type == CppHeaderParser.CppEnum: self._validate_name(cpp_object, \"enum\") line_number = -1 if", "dict() an_object[\"line_number\"] = aparameter[\"line_number\"] if (aparameter[\"type\"].endswith(\"::*\") and (\")\" in aparameter[\"name\"])):", "cpp_object: line_number = cpp_object[\"line_number\"] for amember in cpp_object[\"values\"]: # Use", "\"define_function_argument\": \"function_argument\", \"define_function\": \"function\", \"class_method\": \"function\", \"struct_method\": \"class_method\", \"class_variant\": \"variant\",", "\"class_variant\": \"variant\", \"struct_variant\": \"class_variant\", \"typedef\": \"class\", \"struct\": \"class\", \"enum\": \"class\",", "in override_table: base_name = override_table[name] my_config.update(self._get_config(base_name)) if name in self.__config:", "\"enum\") line_number = -1 if \"line_number\" in cpp_object: line_number =", "If finding single comment, then skip all other searching if", "try: semicolonPos = content.index(';', parameters_stop_pos + 1) if semicolonPos <=", "\"class_method_argument\": 
\"function_argument\", \"struct_method_argument\": \"class_method_argument\", \"define_function_argument\": \"function_argument\", \"define_function\": \"function\", \"class_method\": \"function\",", "def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class CppNamespace(dict):", "class_variant_re) for amember in cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember) for amember in cpp_object[\"enums\"][access_specifier]:", "cpp_object[\"name\"] if ('<' in cpp_object_name) and (\"debug\" in cpp_object): matched", "class Application(object): def __init__(self): description='''A styler just target to naming", "if not cpp_object[\"type\"].endswith(\"::\"): # Don't parse variable that implemented outside", "\"struct_method_argument\" class_variant_re = \"struct_variant\" else: class_re = \"class\" class_method_re =", "cpp_object_name, name_re, error_message)) def _get_class_realname(self, class_name): return re.match(r\"(\\w+).*\", class_name).group(1) def", "\">\" in cpp_object[\"name\"]: regex = r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched = re.search(regex, cpp_object[\"debug\"])", "\"define_function\") is_need_reraise = False except SyntaxError: pass if is_need_reraise: raise", "cpp_object in parsed_info.enums: self._validate_cpp_object(cpp_object) # Verify Variable Names for cpp_object", "my_config.update(self._get_config(base_name)) if name in self.__config: my_config.update(self.__config[name]) return my_config def _is_valid_variable(self,", "= re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine) name = matched.group(1) parameters = [] if", "return 0 return 0 def main(): a = Application() sys.exit(a.exec_())", "static RSignal<void(int)> sReceived; if \"<\" not in cpp_object[\"name\"]: self._validate_name(cpp_object, \"function\")", "parsing global function while True: # FIXME: Parse special case", "Class Names for cpp_object in parsed_info.classes_order: self._validate_cpp_object(cpp_object) # Verify Struct", 
"False elif (c == \"/\") and (content[i + 1] ==", "avariant[\"line_number\"] = cpp_method[\"line_number\"] self._validate_name(avariant, \"variant\") def _validate_name(self, cpp_object, name_re): cpp_object_name", "1 first_i = i last_i = 0 is_finding_block_comment = False", "if cpp_object[\"static\"]: self._validate_name(cpp_object, \"static_variant\") elif cpp_object[\"type\"] not in [\"class\", \"struct\",", "= content.index('{', parameters_stop_pos + 1) except ValueError: return; try: semicolonPos", "cpp_object[\"line_number\"], cpp_object_name, name_re, error_message)) def _get_class_realname(self, class_name): return re.match(r\"(\\w+).*\", class_name).group(1)", "parameter_names = matched.group(2).split(',') for parameter_name in parameter_names: aparameter = CppDefineParameter()", "name_start_pos) parameters_stop_pos = content.index(')', parameters_start_pos) stack = [] try: i", "If it's a functor?? with \"class name::function\" style matched =", "amember in cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember) elif cpp_object_type == CppHeaderParser.CppStruct: self._validate_name(cpp_object, \"struct\")", "all other searching if (c == \"*\") and (content[i +", "matched is None: return \"\" else: return matched.group(1) def _get_config(self,", "achar in name[len(prefix):]: chars.append(\"\\\\s*\") if achar.isalnum(): chars.append(achar) else: chars.append(\"\\\\\") chars.append(achar)", "outside of # template class. It's already be parsed when", "in cpp_object.get_all_methods(): matched = re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"]) if matched is None:", "# the class. 
self._validate_name(cpp_object, \"global_variant\") elif cpp_object_type == CppHeaderParser.CppMethod: #", "six import os.path import traceback class CppDefine(dict): def __init__(self): self[\"name\"]", "python import argparse import CppHeaderParser import re import sys import", "aname avariant[\"line_number\"] = cpp_method[\"line_number\"] self._validate_name(avariant, \"variant\") def _validate_name(self, cpp_object, name_re):", "we must not check it . return if len(cpp_object_name) <=", "for parameter_name in parameter_names: aparameter = CppDefineParameter() aparameter[\"name\"] = parameter_name.strip()", "len(cpp_variable[\"type\"]) <= 0: return False return True def _get_cpp_method_re(self, name):", "ignored this situation: # Code Snippets: static RSignal<void(int)> sReceived; if", "least # \"void\"), it maybe macro invokes. # FIXME: We", "break elif cpp_object_type == CppHeaderParser.CppUnion: self._validate_name(cpp_object, \"union\") elif cpp_object_type ==", "first_i = i last_i = 0 is_finding_block_comment = False is_finding_single_comment", "def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class Application(object):", "_get_argument_name(self, an_argument): if isinstance(an_argument, six.string_types): return an_argument if len(an_argument[\"name\"]) >", "print(str(e)) return 1 except CppHeaderParser.CppHeaderParser.CppParseError as e: # CppHeaderParser can't", "<= 0: # Does not have valid name, we must", "of source code''' parser = argparse.ArgumentParser(description=description) parser.add_argument(\"-c\", \"--config\", help=\"Configuration file", "the CppHeaderParser's problem. 
print(str(e)) return 0 return 0 def main():", "namespace self._validate_cpp_object(cpp_object) # Verify Typdef Names for cpp_object in parsed_info.typedefs:", "\"struct_variant\" else: class_re = \"class\" class_method_re = \"class_method\" class_method_argument_re =", "(cpp_object[\"class\"] is None) or (len(cpp_object[\"class\"]) <= 0): if \">\" in", "parser = argparse.ArgumentParser(description=description) parser.add_argument(\"-c\", \"--config\", help=\"Configuration file path (In YAML", "= dict() if name in override_table: base_name = override_table[name] my_config.update(self._get_config(base_name))", "body. if '{' not in cpp_object['type']: self._validate_cpp_object(cpp_object) for namespace in", "(In YAML format)\", required=True) parser.add_argument(\"-o\", \"--output\", help=\"Output file path\") parser.add_argument(\"-d\",", "in cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember) elif cpp_object_type == CppHeaderParser.CppStruct: self._validate_name(cpp_object, \"struct\") elif", "# If user does not specific output path, we default", "content[first_i:last_i] founded = re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content) for aname in founded: avariant", "content.index('{', parameters_stop_pos + 1) except ValueError: return; try: semicolonPos =", "self._validate_name(cpp_object, \"struct\") elif cpp_object_type == CppHeaderParser.CppEnum: self._validate_name(cpp_object, \"enum\") line_number =", "amember in cpp_object[\"values\"]: # Use parent line number if enum", "self._is_special_method(cpp_object): break if (cpp_object[\"class\"] is None) or (len(cpp_object[\"class\"]) <= 0):", "Liked Define Name self._validate_name(cpp_object, \"define_function\") for aparameter in cpp_object[\"parameters\"]: self._validate_name(aparameter,", "is None: filename = os.path.basename(self.__args.file_path) error_message = self._get_config(name_re)[\"error\"] if len(error_message)", ". 
return matched = re.match(self._get_config(name_re)[\"re\"], cpp_object_name) if matched is None:", "self._validate_name(amember, \"static_variant\") else: self._validate_name(amember, class_variant_re) for amember in cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember)", "def _get_cpp_method_re(self, name): prefix = \"operator\" if not name.startswith(prefix): return", "an_argument): if isinstance(an_argument, six.string_types): return an_argument if len(an_argument[\"name\"]) > 0:", "self.__config[\"_base_\"] = { \"re\":\"[a-zA-Z0-9_]+\", \"error\": \"\", } self.__config[\"_base_\"].update(old_base) def parse_define(self,", "% ( ' '.join([rule_name.capitalize() for rule_name in name_re.split(\"_\")]), error_message) if", "{\" if \"debug\" in cpp_object: if re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"]) is not", "= True try: self._validate_name(amethod, \"define_function\") is_need_reraise = False except SyntaxError:", "in parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text)) # Verify Function Names for cpp_object in", "[] if matched.group(2) is not None: parameter_names = matched.group(2).split(',') for", "cpp_object_name) if matched is None: filename = os.path.basename(self.__args.file_path) error_message =", "= CppDefine() result[\"name\"] = name result[\"parameters\"] = parameters return result", "= -1 class Application(object): def __init__(self): description='''A styler just target", "return True def _get_cpp_method_re(self, name): prefix = \"operator\" if not", "True def _get_argument_name(self, an_argument): if isinstance(an_argument, six.string_types): return an_argument if", "it maybe macro invokes. 
# FIXME: We just ignored this", "[] name_re = self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos = re.search(name_re, content).span()[0] parameters_start_pos =", "__init__(self): description='''A styler just target to naming conventions of source", "name.startswith(prefix): return re.escape(name) # Operator methods chars = [] for", "\"static_variant\": \"variant\", \"global_variant\": \"variant\", \"function_argument\": \"argument\", \"class_method_argument\": \"function_argument\", \"struct_method_argument\": \"class_method_argument\",", "SyntaxError as e: print(str(e)) return 1 except CppHeaderParser.CppHeaderParser.CppParseError as e:", "1] == \"/\"): is_finding_single_comment = True elif c == \"{\":", "not have it's line # number if \"line_number\" not in", "\"variant\", \"static_variant\": \"variant\", \"global_variant\": \"variant\", \"function_argument\": \"argument\", \"class_method_argument\": \"function_argument\", \"struct_method_argument\":", "def _validate_codes_of_cpp_method(self, cpp_method): start_line_index = cpp_method[\"line_number\"] - 1 # Extract", "\"line_number\":amethod[\"line_number\"]}, \"typedef\") for access_specifier in CppHeaderParser.supportedAccessSpecifier: for amember in cpp_object[\"properties\"][access_specifier]:", "(i < len(content)): c = content[i] if is_finding_block_comment: # If", "for aname in founded: avariant = dict() avariant[\"name\"] = aname", "cpp_object[\"name\"]: self._validate_name(cpp_object, \"function\") break if self._get_class_realname(cpp_object[\"class\"]) == cpp_object[\"name\"]: # Constructor", "rule : %s! 
%s\" % ( filename, cpp_object[\"line_number\"], cpp_object_name, name_re,", "+= 1 first_i = i last_i = 0 is_finding_block_comment =", "\"/\"): is_finding_single_comment = True elif c == \"{\": stack.append(i) elif", "if \"debug\" in cpp_object: if re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"]) is not None:", "<reponame>starofrainnight/ncstyler<gh_stars>0 #!/usr/bin/env python import argparse import CppHeaderParser import re import", "internal_predeclares = [\"class\", \"struct\", \"union\"] if amember[\"type\"] in internal_predeclares: is_skip_validate", "self[\"line_number\"] = -1 class CppFileName(dict): def __init__(self): self[\"name\"] = None", "(len(stack) > 0) and (i < len(content)): c = content[i]", "# Verify Class Names for cpp_object in parsed_info.classes_order: self._validate_cpp_object(cpp_object) #", "last_i = 0 is_finding_block_comment = False is_finding_single_comment = False while", "True elif is_finding_single_comment: # If finding single comment, then skip", "class name break self._validate_name(cpp_object, \"class_method\") break elif cpp_object_type == CppHeaderParser.CppUnion:", "if not name.startswith(prefix): return re.escape(name) # Operator methods chars =", "if '{' not in cpp_object['type']: self._validate_cpp_object(cpp_object) for namespace in parsed_info.namespaces:", "0: # If a function does not have return value(at", "= True if not is_skip_validate: if amember[\"static\"]: self._validate_name(amember, \"static_variant\") else:", "is None: self._validate_codes_of_cpp_method(amethod) if not self._is_special_method(amethod): if ((amethod[\"name\"] != self._get_class_realname(cpp_object[\"name\"]))", "= -1 class CppNamespace(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"]", "line number if enum value does not have it's line", "Name self._validate_name(cpp_object, \"define_function\") for aparameter in cpp_object[\"parameters\"]: self._validate_name(aparameter, \"define_function_argument\") elif", "does not 
specific output path, we default it to input", "cpp_object in parsed_info.classes_order: self._validate_cpp_object(cpp_object) # Verify Struct Names for cpp_object", "def main(): a = Application() sys.exit(a.exec_()) if __name__ == \"__main__\":", "Does not have valid name, we must not check it", "= None self[\"line_number\"] = -1 class Application(object): def __init__(self): description='''A", "valid name, we must not check it . return if", "class. self._validate_name(cpp_object, \"global_variant\") elif cpp_object_type == CppHeaderParser.CppMethod: # Exclude \"main\"", "if \"<\" not in cpp_object[\"name\"]: self._validate_name(cpp_object, \"function\") break if self._get_class_realname(cpp_object[\"class\"])", "except ValueError: # Not found a semicolon, just ignored. pass", "chars.append(achar) return \"operator%s\" % ''.join(chars) def _validate_codes_of_cpp_method(self, cpp_method): start_line_index =", "[\"class\", \"struct\", \"union\"]: if not cpp_object[\"type\"].endswith(\"::\"): # Don't parse variable", "+ 1] == \"*\"): is_finding_block_comment = True elif is_finding_single_comment: #", "1: cpp_object_name = splitted[-1] if '...' 
in cpp_object_name: # Does", "del stack[len(stack) - 1] i += 1 if len(stack) <=", "1] == \"*\"): is_finding_block_comment = True elif is_finding_single_comment: # If", "elif c == \"}\": last_i = i del stack[len(stack) -", "i last_i = 0 is_finding_block_comment = False is_finding_single_comment = False", "\"class_method\": \"function\", \"struct_method\": \"class_method\", \"class_variant\": \"variant\", \"struct_variant\": \"class_variant\", \"typedef\": \"class\",", "cpp_variable): if cpp_variable[\"type\"] == \"return\": return False if len(cpp_variable[\"type\"]) <=", "cpp_object[\"type\"] not in [\"class\", \"struct\", \"union\"]: if not cpp_object[\"type\"].endswith(\"::\"): #", "parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text)) # Verify Function Names for cpp_object in parsed_info.functions:", "= None self[\"line_number\"] = -1 class CppNamespace(dict): def __init__(self): self[\"name\"]", "code_lines = [] name_re = self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos = re.search(name_re, content).span()[0]", "if enum value does not have it's line # number", "matched = re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"]) if matched is None: # with", "like names splitted = cpp_object_name.split() if len(splitted) > 1: cpp_object_name", "if cpp_variable[\"type\"] == \"return\": return False if len(cpp_variable[\"type\"]) <= 0:", "None: return True return False return True def _get_argument_name(self, an_argument):", "while (len(stack) > 0) and (i < len(content)): c =", "class. 
It's already be parsed when parsing # the class.", "def _get_class_realname(self, class_name): return re.match(r\"(\\w+).*\", class_name).group(1) def _validate_cpp_object(self, cpp_object): cpp_object_type", "with \"class name::function\" style matched = re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"]) if matched", "= source_file.readlines() parsed_info = CppHeaderParser.CppHeader(self.__args.file_path) # Verify File Names filename", "\"name\" in cpp_object: cpp_object_name = cpp_object[\"name\"] if ('<' in cpp_object_name)", "\"function\") break if self._get_class_realname(cpp_object[\"class\"]) == cpp_object[\"name\"]: # Constructor / Destructor", "\"static_variant\") else: self._validate_name(amember, class_variant_re) for amember in cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember) for", "matched.group(1) is not None: cpp_object[\"class\"] = matched.group(1) cpp_object[\"name\"] = matched.group(2)", "\"--debug\", action='store_true', help=\"Print trace stack\") parser.add_argument(\"file_path\", help=\"Source file path\") self.__args", "\"struct\", \"union\"]: if not cpp_object[\"type\"].endswith(\"::\"): # Don't parse variable that", "not have valid name, we must not check it .", "# Use parent line number if enum value does not", "\"main\" function while parsing global function while True: # FIXME:", "style matched = re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"]) if matched is None: return", "# Verify Enum Names for cpp_object in parsed_info.enums: self._validate_cpp_object(cpp_object) #", "= parser.parse_args() # If user does not specific output path,", "self.__args.file_path self.__config = yaml.load(open(self.__args.config)) old_base = self.__config[\"_base_\"] self.__config[\"_base_\"] = {", "return an_argument if len(an_argument[\"name\"]) > 0: return an_argument[\"name\"] # If", "matched is not None: cpp_object_name = matched.group(1) else: return #", 
"_is_special_method(self, amethod): if isinstance(amethod, six.string_types): amethod_name = amethod else: amethod_name", "CppHeaderParser import re import sys import yaml import copy import", "parameters_start_pos = content.index('(', name_start_pos) parameters_stop_pos = content.index(')', parameters_start_pos) stack =", "not None: cpp_object[\"class\"] = matched.group(1) cpp_object[\"name\"] = matched.group(2) self._validate_name(cpp_object, \"class_method\")", "must not check it . return matched = re.match(self._get_config(name_re)[\"re\"], cpp_object_name)", "to naming conventions of source code''' parser = argparse.ArgumentParser(description=description) parser.add_argument(\"-c\",", "parse by _validate_codes_of_cpp_method() self._source_lines = source_file.readlines() parsed_info = CppHeaderParser.CppHeader(self.__args.file_path) #", "\"_base_\", \"define\": \"_base_\", \"filename\": \"_base_\", # Special config use to", "amethod[\"debug\"]) is not None: return True return False return True", "amethod_name = amethod[\"name\"] founded = re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name) if len(founded) <=", "cpp_object in parsed_info.functions: self._validate_cpp_object(cpp_object) # Verify Class Names for cpp_object", "# If a function does not have return value(at least", "cpp method codes rest_lines = self._source_lines[start_line_index:] content = '\\n'.join(rest_lines) code_lines", "in cpp_object: cpp_object_name = cpp_object[\"name\"] if ('<' in cpp_object_name) and", "argparse.ArgumentParser(description=description) parser.add_argument(\"-c\", \"--config\", help=\"Configuration file path (In YAML format)\", required=True)", "File Names filename = os.path.basename(self.__args.file_path) cpp_object = CppFileName() cpp_object[\"name\"] =", "rest_lines = self._source_lines[start_line_index:] content = '\\n'.join(rest_lines) code_lines = [] name_re", "_get_class_realname(self, class_name): return re.match(r\"(\\w+).*\", class_name).group(1) def 
_validate_cpp_object(self, cpp_object): cpp_object_type =", "in cpp_object_name: # Does not have valid name, we must", "for amethod in cpp_object.get_all_methods(): matched = re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"]) if matched", "CppNamespace: self._validate_name(cpp_object, \"namespace\") elif cpp_object_type == CppFileName: self._validate_name(cpp_object, \"filename\") def", "\"struct_method\" class_method_argument_re = \"struct_method_argument\" class_variant_re = \"struct_variant\" else: class_re =", "= \"operator\" if not name.startswith(prefix): return re.escape(name) # Operator methods", "the class. self._validate_name(cpp_object, \"global_variant\") elif cpp_object_type == CppHeaderParser.CppMethod: # Exclude", "amember[\"line_number\"] = line_number self._validate_name(amember, \"enum_value\") elif cpp_object_type == CppHeaderParser.CppVariable: if", "%s! %s\" % ( filename, cpp_object[\"line_number\"], cpp_object_name, name_re, error_message)) def", "is_skip_validate = True if not is_skip_validate: if amember[\"static\"]: self._validate_name(amember, \"static_variant\")", "None self[\"line_number\"] = -1 class CppFileName(dict): def __init__(self): self[\"name\"] =", "= matched.group(2).split(',') for parameter_name in parameter_names: aparameter = CppDefineParameter() aparameter[\"name\"]", "an_argument[\"type\"]) if matched is None: # with normal \"function\" style", "Use parent line number if enum value does not have", "not in amember: amember[\"line_number\"] = line_number self._validate_name(amember, \"enum_value\") elif cpp_object_type", "It's already be parsed when parsing # the class. 
self._validate_name(cpp_object,", "self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos = re.search(name_re, content).span()[0] parameters_start_pos = content.index('(', name_start_pos) parameters_stop_pos", "if not self._is_special_method(amethod): if ((amethod[\"name\"] != self._get_class_realname(cpp_object[\"name\"])) and (not amethod.get(\"constructor\",", "dict() avariant[\"name\"] = aname avariant[\"line_number\"] = cpp_method[\"line_number\"] self._validate_name(avariant, \"variant\") def", "Name self._validate_name(cpp_object, \"define\") else: # Function Liked Define Name self._validate_name(cpp_object,", "\"struct\" in cpp_object[\"declaration_method\"]: class_re = \"struct\" class_method_re = \"struct_method\" class_method_argument_re", "except SyntaxError: is_need_reraise = True try: self._validate_name(amethod, \"define_function\") is_need_reraise =", "self._validate_name(cpp_object, \"class_method\") elif len(cpp_object[\"returns\"]) > 0: # If a function", "\"argument\": \"variant\", \"static_variant\": \"variant\", \"global_variant\": \"variant\", \"function_argument\": \"argument\", \"class_method_argument\": \"function_argument\",", "\"namespace\") elif cpp_object_type == CppFileName: self._validate_name(cpp_object, \"filename\") def exec_(self): try:", "founded = re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name) if len(founded) <= 0: if re.match(r\"(?:^|.*\\W)operator\\W.*\",", "content = content[first_i:last_i] founded = re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content) for aname in", "= cpp_method[\"line_number\"] self._validate_name(avariant, \"variant\") def _validate_name(self, cpp_object, name_re): cpp_object_name =", "Struct Names for cpp_object in parsed_info.structs_order: self._validate_cpp_object(cpp_object) # Verify Enum", "function while parsing global function while True: # FIXME: Parse", "= matched.group(1) else: return # Parse union like names splitted", "for amember in cpp_object[\"values\"]: # Use parent line 
number if", "number if enum value does not have it's line #", "is_finding_block_comment = False elif (c == \"/\") and (content[i +", "in cpp_object[\"declaration_method\"]: class_re = \"struct\" class_method_re = \"struct_method\" class_method_argument_re =", "i += 1 first_i = i last_i = 0 is_finding_block_comment", "source_file: # For later parse by _validate_codes_of_cpp_method() self._source_lines = source_file.readlines()", "True return False return True def _get_argument_name(self, an_argument): if isinstance(an_argument,", "\"return\": return False if len(cpp_variable[\"type\"]) <= 0: return False return", "self[\"line_number\"] = -1 class CppNamespace(dict): def __init__(self): self[\"name\"] = None", "if (cpp_object[\"class\"] is None) or (len(cpp_object[\"class\"]) <= 0): if \">\"", "matched is None: # with normal \"function\" style matched =", "Verify Define Names for define_text in parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text)) # Verify", "( ' '.join([rule_name.capitalize() for rule_name in name_re.split(\"_\")]), error_message) if self.__args.debug:", "= [] for achar in name[len(prefix):]: chars.append(\"\\\\s*\") if achar.isalnum(): chars.append(achar)", "self._validate_name(amember, \"enum_value\") elif cpp_object_type == CppHeaderParser.CppVariable: if cpp_object[\"type\"] != \"return\":", "1 # Extract cpp method codes rest_lines = self._source_lines[start_line_index:] content", "Snippets: static RSignal<void(int)> sReceived; if \"<\" not in cpp_object[\"name\"]: self._validate_name(cpp_object,", "# Parse union like names splitted = cpp_object_name.split() if len(splitted)", "is_need_reraise = True try: self._validate_name(amethod, \"define_function\") is_need_reraise = False except", "len(an_argument[\"name\"]) > 0: return an_argument[\"name\"] # If it's a functor??", "cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember) elif cpp_object_type == CppHeaderParser.CppStruct: 
self._validate_name(cpp_object, \"struct\") elif cpp_object_type", "\"class\", \"enum_value\": \"define\", \"union\": \"struct\", } my_config = dict() if", "Define Names for define_text in parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text)) # Verify Function", "\"variant\", \"function_argument\": \"argument\", \"class_method_argument\": \"function_argument\", \"struct_method_argument\": \"class_method_argument\", \"define_function_argument\": \"function_argument\", \"define_function\":", "methods chars = [] for achar in name[len(prefix):]: chars.append(\"\\\\s*\") if", "define filename rule \"argument\": \"variant\", \"static_variant\": \"variant\", \"global_variant\": \"variant\", \"function_argument\":", "old_base = self.__config[\"_base_\"] self.__config[\"_base_\"] = { \"re\":\"[a-zA-Z0-9_]+\", \"error\": \"\", }", "Define Name self._validate_name(cpp_object, \"define\") else: # Function Liked Define Name", "raise for aparameter in amethod[\"parameters\"]: an_object = dict() an_object[\"line_number\"] =", "splitted[-1] if '...' 
in cpp_object_name: # Does not have valid", ") [ N ]> {\" if \"debug\" in cpp_object: if", "self._validate_name(cpp_object, \"enum\") line_number = -1 if \"line_number\" in cpp_object: line_number", "% ( filename, cpp_object[\"line_number\"], cpp_object_name, name_re, error_message)) def _get_class_realname(self, class_name):", "\"_base_\", \"namespace\": \"_base_\", \"define\": \"_base_\", \"filename\": \"_base_\", # Special config", "self._validate_name(aparameter, \"define_function_argument\") elif cpp_object_type == CppHeaderParser.CppClass: if \"struct\" in cpp_object[\"declaration_method\"]:", "0: # Does not have valid name, we must not", "self.__args = parser.parse_args() # If user does not specific output", "for cpp_object in parsed_info.enums: self._validate_cpp_object(cpp_object) # Verify Variable Names for", "name_re): cpp_object_name = \"\" if isinstance(cpp_object, six.string_types): cpp_object_name = cpp_object", "True: # FIXME: Parse special case : \"struct RArraySize <T", "cpp_object cpp_object = dict() cpp_object[\"name\"] = cpp_object_name cpp_object[\"line_number\"] = -1", "# CppHeaderParser can't parse this file, but we should pass", "\"r\") as source_file: # For later parse by _validate_codes_of_cpp_method() self._source_lines", "= argparse.ArgumentParser(description=description) parser.add_argument(\"-c\", \"--config\", help=\"Configuration file path (In YAML format)\",", "= CppFileName() cpp_object[\"name\"] = filename self._validate_cpp_object(cpp_object) # Verify Define Names", "\"variant\", \"global_variant\": \"variant\", \"function_argument\": \"argument\", \"class_method_argument\": \"function_argument\", \"struct_method_argument\": \"class_method_argument\", \"define_function_argument\":", "parameter_name.strip() parameters.append(aparameter) result = CppDefine() result[\"name\"] = name result[\"parameters\"] =", "parsed_info.enums: self._validate_cpp_object(cpp_object) # Verify Variable Names for cpp_object in parsed_info.variables:", 
"c == \"{\": stack.append(i) elif c == \"}\": last_i =", "None self[\"line_number\"] = -1 class CppNamespace(dict): def __init__(self): self[\"name\"] =", "__init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class Application(object): def", "if c == \"\\n\": is_finding_single_comment = False elif (c ==", "\"enum_value\": \"define\", \"union\": \"struct\", } my_config = dict() if name", "\"function\", \"struct_method\": \"class_method\", \"class_variant\": \"variant\", \"struct_variant\": \"class_variant\", \"typedef\": \"class\", \"struct\":", "else: amethod_name = amethod[\"name\"] founded = re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name) if len(founded)", "value(at least # \"void\"), it maybe macro invokes. # FIXME:", "traceback class CppDefine(dict): def __init__(self): self[\"name\"] = None self[\"parameters\"] =", "self._validate_name(amethod, \"define_function\") is_need_reraise = False except SyntaxError: pass if is_need_reraise:", "None self[\"parameters\"] = [] self[\"line_number\"] = -1 class CppDefineParameter(dict): def", "\"class_method\" class_method_argument_re = \"class_method_argument\" class_variant_re = \"class_variant\" self._validate_name(cpp_object, class_re) for", "parser.add_argument(\"-d\", \"--debug\", action='store_true', help=\"Print trace stack\") parser.add_argument(\"file_path\", help=\"Source file path\")", "sReceived; if \"<\" not in cpp_object[\"name\"]: self._validate_name(cpp_object, \"function\") break if", "aparameter[\"name\"])): an_object[\"name\"] = re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1) try: self._validate_name(an_object, class_method_re) except SyntaxError:", "== CppHeaderParser.CppEnum: self._validate_name(cpp_object, \"enum\") line_number = -1 if \"line_number\" in", "\"class_method\") elif len(cpp_object[\"returns\"]) > 0: # If a function does", "later parse by _validate_codes_of_cpp_method() self._source_lines = source_file.readlines() parsed_info = 
CppHeaderParser.CppHeader(self.__args.file_path)", "functor?? with \"class name::function\" style matched = re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"]) if", "\"*\"): is_finding_block_comment = True elif is_finding_single_comment: # If finding single", "= parameter_name.strip() parameters.append(aparameter) result = CppDefine() result[\"name\"] = name result[\"parameters\"]", "CppDefineParameter() aparameter[\"name\"] = parameter_name.strip() parameters.append(aparameter) result = CppDefine() result[\"name\"] =", "self._validate_cpp_object(self.parse_define(define_text)) # Verify Function Names for cpp_object in parsed_info.functions: self._validate_cpp_object(cpp_object)", "e: # CppHeaderParser can't parse this file, but we should", "comment, then skip all other searching if c == \"\\n\":", "cpp_object in parsed_info.structs_order: self._validate_cpp_object(cpp_object) # Verify Enum Names for cpp_object", "isinstance(an_argument, six.string_types): return an_argument if len(an_argument[\"name\"]) > 0: return an_argument[\"name\"]", "Extract cpp method codes rest_lines = self._source_lines[start_line_index:] content = '\\n'.join(rest_lines)", "amethod in cpp_object.get_all_methods(): matched = re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"]) if matched is", "cpp_object.get_all_methods(): matched = re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"]) if matched is None: self._validate_codes_of_cpp_method(amethod)", "For later parse by _validate_codes_of_cpp_method() self._source_lines = source_file.readlines() parsed_info =", "if achar.isalnum(): chars.append(achar) else: chars.append(\"\\\\\") chars.append(achar) return \"operator%s\" % ''.join(chars)", "aparameter[\"line_number\"] if (aparameter[\"type\"].endswith(\"::*\") and (\")\" in aparameter[\"name\"])): an_object[\"name\"] = re.match(r\"(\\w+).*\",", "other searching if c == \"\\n\": is_finding_single_comment = 
False elif", "= Application() sys.exit(a.exec_()) if __name__ == \"__main__\": # Execute only", "= -1 class CppFileName(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"]", "class_method_re) except SyntaxError: is_need_reraise = True try: self._validate_name(amethod, \"define_function\") is_need_reraise", "self[\"line_number\"] = -1 class Application(object): def __init__(self): description='''A styler just", "\"class_method_argument\", \"define_function_argument\": \"function_argument\", \"define_function\": \"function\", \"class_method\": \"function\", \"struct_method\": \"class_method\", \"class_variant\":", "self._validate_name(cpp_object, \"filename\") def exec_(self): try: with open(self.__args.file_path, \"r\") as source_file:", "{ \"class\": \"_base_\", \"function\": \"_base_\", \"variant\": \"_base_\", \"namespace\": \"_base_\", \"define\":", "this situation: # Code Snippets: static RSignal<void(int)> sReceived; if \"<\"", "elif cpp_object[\"type\"] not in [\"class\", \"struct\", \"union\"]: if not cpp_object[\"type\"].endswith(\"::\"):", "self._validate_name(amethod, class_method_re) except SyntaxError: is_need_reraise = True try: self._validate_name(amethod, \"define_function\")", "len(error_message) > 0: error_message = \"%s %s\" % ( '", "if matched is None: self._validate_codes_of_cpp_method(amethod) if not self._is_special_method(amethod): if ((amethod[\"name\"]", "parsed when parsing # the class. 
self._validate_name(cpp_object, \"global_variant\") elif cpp_object_type", "\"global_variant\") elif cpp_object_type == CppHeaderParser.CppMethod: # Exclude \"main\" function while", "if amember[\"static\"]: self._validate_name(amember, \"static_variant\") else: self._validate_name(amember, class_variant_re) for amember in", "we should pass it, this # is the CppHeaderParser's problem.", "content.index('(', name_start_pos) parameters_stop_pos = content.index(')', parameters_start_pos) stack = [] try:", "= amethod[\"name\"] founded = re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name) if len(founded) <= 0:", "single comment, then skip all other searching if c ==", "amember) and (amember[\"type\"] is not None): internal_predeclares = [\"class\", \"struct\",", "re.search(regex, cpp_object[\"debug\"]) if matched.group(1) is not None: cpp_object[\"class\"] = matched.group(1)", "splitted = cpp_object_name.split() if len(splitted) > 1: cpp_object_name = splitted[-1]", "Verify Enum Names for cpp_object in parsed_info.enums: self._validate_cpp_object(cpp_object) # Verify", "= \"class_variant\" self._validate_name(cpp_object, class_re) for amethod in cpp_object.get_all_methods(): matched =", "amethod_name = amethod else: amethod_name = amethod[\"name\"] founded = re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\",", "help=\"Configuration file path (In YAML format)\", required=True) parser.add_argument(\"-o\", \"--output\", help=\"Output", "cpp_object_type == CppHeaderParser.CppStruct: self._validate_name(cpp_object, \"struct\") elif cpp_object_type == CppHeaderParser.CppEnum: self._validate_name(cpp_object,", ". 
return if len(cpp_object_name) <= 0: # Does not have", "an_argument[\"type\"]) if matched is None: return \"\" else: return matched.group(1)", "0: if re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"]) is not None: return True return", "not None: cpp_object_name = matched.group(1) else: return # Parse union", "for cpp_object in parsed_info.typedefs: self._validate_cpp_object(cpp_object) except SyntaxError as e: print(str(e))", "= os.path.basename(self.__args.file_path) cpp_object = CppFileName() cpp_object[\"name\"] = filename self._validate_cpp_object(cpp_object) #", "self._validate_name(cpp_object, \"define\") else: # Function Liked Define Name self._validate_name(cpp_object, \"define_function\")", "return True return False return True def _get_argument_name(self, an_argument): if", "in cpp_object[\"values\"]: # Use parent line number if enum value", "self._get_config(name_re)[\"error\"] if len(error_message) > 0: error_message = \"%s %s\" %", "input file # path if self.__args.output is None: self.__args.output =", "None: return \"\" else: return matched.group(1) def _get_config(self, name): override_table", "= type(cpp_object) if cpp_object_type == CppDefine: if len(cpp_object[\"parameters\"]) <= 0:", "self._validate_name(cpp_object, \"function\") break if self._get_class_realname(cpp_object[\"class\"]) == cpp_object[\"name\"]: # Constructor /", "= \"struct_variant\" else: class_re = \"class\" class_method_re = \"class_method\" class_method_argument_re", "parsed_info.structs_order: self._validate_cpp_object(cpp_object) # Verify Enum Names for cpp_object in parsed_info.enums:", "c == \"}\": last_i = i del stack[len(stack) - 1]", "= CppDefineParameter() aparameter[\"name\"] = parameter_name.strip() parameters.append(aparameter) result = CppDefine() result[\"name\"]", "len(cpp_object[\"parameters\"]) <= 0: # Normal Define Name self._validate_name(cpp_object, \"define\") else:", "finding single comment, then skip all other searching if c", "else: 
self._validate_name(amember, class_variant_re) for amember in cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember) for amember", "is_need_reraise: raise for aparameter in amethod[\"parameters\"]: an_object = dict() an_object[\"line_number\"]", "error_message = \"%s %s\" % ( ' '.join([rule_name.capitalize() for rule_name", "in parsed_info.structs_order: self._validate_cpp_object(cpp_object) # Verify Enum Names for cpp_object in", "parser.add_argument(\"file_path\", help=\"Source file path\") self.__args = parser.parse_args() # If user", "= re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content) for aname in founded: avariant = dict()", "if not is_skip_validate: if amember[\"static\"]: self._validate_name(amember, \"static_variant\") else: self._validate_name(amember, class_variant_re)", "if self._get_class_realname(cpp_object[\"class\"]) == cpp_object[\"name\"]: # Constructor / Destructor will the", "CppDefine() result[\"name\"] = name result[\"parameters\"] = parameters return result def", "if name in override_table: base_name = override_table[name] my_config.update(self._get_config(base_name)) if name", "# For later parse by _validate_codes_of_cpp_method() self._source_lines = source_file.readlines() parsed_info", "it to input file # path if self.__args.output is None:", "can't parse this file, but we should pass it, this", "== CppHeaderParser.CppVariable: if cpp_object[\"type\"] != \"return\": if cpp_object[\"static\"]: self._validate_name(cpp_object, \"static_variant\")", "is not None: return True return False return True def", "an_object = dict() an_object[\"line_number\"] = aparameter[\"line_number\"] if (aparameter[\"type\"].endswith(\"::*\") and (\")\"", "We just ignored this situation: # Code Snippets: static RSignal<void(int)>", "parameter_names: aparameter = CppDefineParameter() aparameter[\"name\"] = parameter_name.strip() parameters.append(aparameter) result =", "config use to define filename rule \"argument\": \"variant\", 
\"static_variant\": \"variant\",", "'.join([rule_name.capitalize() for rule_name in name_re.split(\"_\")]), error_message) if self.__args.debug: traceback.print_stack() raise", "an_object[\"line_number\"] = aparameter[\"line_number\"] if (aparameter[\"type\"].endswith(\"::*\") and (\")\" in aparameter[\"name\"])): an_object[\"name\"]", "\"class_variant\", \"typedef\": \"class\", \"struct\": \"class\", \"enum\": \"class\", \"enum_value\": \"define\", \"union\":", "(\")\" in aparameter[\"name\"])): an_object[\"name\"] = re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1) try: self._validate_name(an_object, class_method_re)", "in parsed_info.typedefs: self._validate_cpp_object(cpp_object) except SyntaxError as e: print(str(e)) return 1", "if len(cpp_variable[\"type\"]) <= 0: return False return True def _get_cpp_method_re(self,", "re.match(r\"(\\w+).*\", class_name).group(1) def _validate_cpp_object(self, cpp_object): cpp_object_type = type(cpp_object) if cpp_object_type", "(amember[\"type\"] is not None): internal_predeclares = [\"class\", \"struct\", \"union\"] if", "Don't parse variable that implemented outside of # template class.", "= re.match(self._get_config(name_re)[\"re\"], cpp_object_name) if matched is None: filename = os.path.basename(self.__args.file_path)", "situation: # Code Snippets: static RSignal<void(int)> sReceived; if \"<\" not", "class_method_re = \"struct_method\" class_method_argument_re = \"struct_method_argument\" class_variant_re = \"struct_variant\" else:", "if amember[\"type\"] in internal_predeclares: is_skip_validate = True if not is_skip_validate:", "cpp_object[\"class\"] = matched.group(1) cpp_object[\"name\"] = matched.group(2) self._validate_name(cpp_object, \"class_method\") elif len(cpp_object[\"returns\"])", "def __init__(self): description='''A styler just target to naming conventions of", "0): if \">\" in cpp_object[\"name\"]: regex = r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched =", "in parameter_names: aparameter = 
CppDefineParameter() aparameter[\"name\"] = parameter_name.strip() parameters.append(aparameter) result", "# Not found a semicolon, just ignored. pass skipped_lines =", "if ((amethod[\"name\"] != self._get_class_realname(cpp_object[\"name\"])) and (not amethod.get(\"constructor\", False)) and (not", "+ 1) if semicolonPos <= i: return; except ValueError: #", "not have return value(at least # \"void\"), it maybe macro", "re import sys import yaml import copy import six import", "cpp_object_type == CppNamespace: self._validate_name(cpp_object, \"namespace\") elif cpp_object_type == CppFileName: self._validate_name(cpp_object,", "# Constructor / Destructor will the same with class name", "matched.group(2) self._validate_name(cpp_object, \"class_method\") elif len(cpp_object[\"returns\"]) > 0: # If a", "self.__args.output = self.__args.file_path self.__config = yaml.load(open(self.__args.config)) old_base = self.__config[\"_base_\"] self.__config[\"_base_\"]", "= True elif c == \"{\": stack.append(i) elif c ==", "cpp_object_name = \"\" if isinstance(cpp_object, six.string_types): cpp_object_name = cpp_object cpp_object", "''.join(chars) def _validate_codes_of_cpp_method(self, cpp_method): start_line_index = cpp_method[\"line_number\"] - 1 #", "CppHeaderParser.CppHeaderParser.CppParseError as e: # CppHeaderParser can't parse this file, but", "if re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"]) is not None: return True return False", "\"\" if isinstance(cpp_object, six.string_types): cpp_object_name = cpp_object cpp_object = dict()", "0, i) - 2 stack.append(i) i += 1 first_i =", "it's a functor?? 
with \"class name::function\" style matched = re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\",", "the same with class name break self._validate_name(cpp_object, \"class_method\") break elif", "CppHeaderParser.CppMethod: # Exclude \"main\" function while parsing global function while", "= re.search(name_re, content).span()[0] parameters_start_pos = content.index('(', name_start_pos) parameters_stop_pos = content.index(')',", "parsed_info.classes_order: self._validate_cpp_object(cpp_object) # Verify Struct Names for cpp_object in parsed_info.structs_order:", "\"define_function\": \"function\", \"class_method\": \"function\", \"struct_method\": \"class_method\", \"class_variant\": \"variant\", \"struct_variant\": \"class_variant\",", "\"/\") and (content[i + 1] == \"/\"): is_finding_single_comment = True", "\"enum_value\") elif cpp_object_type == CppHeaderParser.CppVariable: if cpp_object[\"type\"] != \"return\": if", "my_config def _is_valid_variable(self, cpp_variable): if cpp_variable[\"type\"] == \"return\": return False", "YAML format)\", required=True) parser.add_argument(\"-o\", \"--output\", help=\"Output file path\") parser.add_argument(\"-d\", \"--debug\",", "if isinstance(amethod, six.string_types): amethod_name = amethod else: amethod_name = amethod[\"name\"]", "return result def _is_special_method(self, amethod): if isinstance(amethod, six.string_types): amethod_name =", "if name in self.__config: my_config.update(self.__config[name]) return my_config def _is_valid_variable(self, cpp_variable):", "r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched = re.search(regex, cpp_object[\"debug\"]) if matched.group(1) is not None:", "= name result[\"parameters\"] = parameters return result def _is_special_method(self, amethod):", "None) or (len(cpp_object[\"class\"]) <= 0): if \">\" in cpp_object[\"name\"]: regex", "= cpp_object_name cpp_object[\"line_number\"] = -1 elif \"name\" in cpp_object: cpp_object_name", "SyntaxError: is_need_reraise = True try: 
self._validate_name(amethod, \"define_function\") is_need_reraise = False", "# FIXME: We just ignored this situation: # Code Snippets:", "self._source_lines[start_line_index:] content = '\\n'.join(rest_lines) code_lines = [] name_re = self._get_cpp_method_re(cpp_method[\"name\"])", "< len(content)): c = content[i] if is_finding_block_comment: # If finding", "CppHeaderParser.CppVariable: if cpp_object[\"type\"] != \"return\": if cpp_object[\"static\"]: self._validate_name(cpp_object, \"static_variant\") elif", "class_re = \"struct\" class_method_re = \"struct_method\" class_method_argument_re = \"struct_method_argument\" class_variant_re", "= matched.group(1) parameters = [] if matched.group(2) is not None:", "isinstance(amethod, six.string_types): amethod_name = amethod else: amethod_name = amethod[\"name\"] founded", "If finding block comment, then skip all other searching if", "= \"struct\" class_method_re = \"struct_method\" class_method_argument_re = \"struct_method_argument\" class_variant_re =", "self._validate_codes_of_cpp_method(cpp_object) if cpp_object[\"name\"] == \"main\": break if self._is_special_method(cpp_object): break if", "parameters = [] if matched.group(2) is not None: parameter_names =", "not in [\"class\", \"struct\", \"union\"]: if not cpp_object[\"type\"].endswith(\"::\"): # Don't", "parameters_stop_pos + 1) except ValueError: return; try: semicolonPos = content.index(';',", "= CppNamespace() cpp_object[\"name\"] = namespace self._validate_cpp_object(cpp_object) # Verify Typdef Names", "break if (cpp_object[\"class\"] is None) or (len(cpp_object[\"class\"]) <= 0): if", "[\"class\", \"struct\", \"union\"] if amember[\"type\"] in internal_predeclares: is_skip_validate = True", "+ 1] == \"/\"): is_finding_single_comment = True elif c ==", "target to naming conventions of source code''' parser = argparse.ArgumentParser(description=description)", "parsing # the class. 
self._validate_name(cpp_object, \"global_variant\") elif cpp_object_type == CppHeaderParser.CppMethod:", "in parsed_info.functions: self._validate_cpp_object(cpp_object) # Verify Class Names for cpp_object in", "in CppHeaderParser.supportedAccessSpecifier: for amember in cpp_object[\"properties\"][access_specifier]: is_skip_validate = False if", "amethod.get(\"destructor\", False))): try: self._validate_name(amethod, class_method_re) except SyntaxError: is_need_reraise = True", "def _get_config(self, name): override_table = { \"class\": \"_base_\", \"function\": \"_base_\",", "cpp_object['type']: self._validate_cpp_object(cpp_object) for namespace in parsed_info.namespaces: cpp_object = CppNamespace() cpp_object[\"name\"]", "amember in cpp_object[\"properties\"][access_specifier]: is_skip_validate = False if (\"type\" in amember)", "elif (c == \"/\") and (content[i + 1] == \"/\"):", "parameter_name in parameter_names: aparameter = CppDefineParameter() aparameter[\"name\"] = parameter_name.strip() parameters.append(aparameter)", "parse this file, but we should pass it, this #", "try: self._validate_name(amethod, \"define_function\") is_need_reraise = False except SyntaxError: pass if", "class CppNamespace(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1", "1] i += 1 if len(stack) <= 0: content =", "\"}\": last_i = i del stack[len(stack) - 1] i +=", "cpp_method[\"line_number\"] + content.count(\"\\n\", 0, i) - 2 stack.append(i) i +=", "in parsed_info.enums: self._validate_cpp_object(cpp_object) # Verify Variable Names for cpp_object in", "== \"*\"): is_finding_block_comment = True elif is_finding_single_comment: # If finding", "not self._is_special_method(amethod): if ((amethod[\"name\"] != self._get_class_realname(cpp_object[\"name\"])) and (not amethod.get(\"constructor\", False))", "isinstance(cpp_object, six.string_types): cpp_object_name = cpp_object cpp_object = dict() cpp_object[\"name\"] =", "== \"/\"): is_finding_single_comment = 
True elif c == \"{\": stack.append(i)", "cpp_variable[\"type\"] == \"return\": return False if len(cpp_variable[\"type\"]) <= 0: return", "-1 class CppDefineParameter(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] =", "is None: self.__args.output = self.__args.file_path self.__config = yaml.load(open(self.__args.config)) old_base =", "CppHeaderParser.CppStruct: self._validate_name(cpp_object, \"struct\") elif cpp_object_type == CppHeaderParser.CppEnum: self._validate_name(cpp_object, \"enum\") line_number", "\"variant\") def _validate_name(self, cpp_object, name_re): cpp_object_name = \"\" if isinstance(cpp_object,", "file path (In YAML format)\", required=True) parser.add_argument(\"-o\", \"--output\", help=\"Output file", "break self._validate_codes_of_cpp_method(cpp_object) if cpp_object[\"name\"] == \"main\": break if self._is_special_method(cpp_object): break", "trace stack\") parser.add_argument(\"file_path\", help=\"Source file path\") self.__args = parser.parse_args() #", "if \">\" in cpp_object[\"name\"]: regex = r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched = re.search(regex,", "elif c == \"{\": stack.append(i) elif c == \"}\": last_i", "== CppFileName: self._validate_name(cpp_object, \"filename\") def exec_(self): try: with open(self.__args.file_path, \"r\")", "False if len(cpp_variable[\"type\"]) <= 0: return False return True def", "return re.escape(name) # Operator methods chars = [] for achar", "chars.append(\"\\\\s*\") if achar.isalnum(): chars.append(achar) else: chars.append(\"\\\\\") chars.append(achar) return \"operator%s\" %", "in cpp_object: line_number = cpp_object[\"line_number\"] for amember in cpp_object[\"values\"]: #", "ValueError: return; try: semicolonPos = content.index(';', parameters_stop_pos + 1) if", "CppFileName: self._validate_name(cpp_object, \"filename\") def exec_(self): try: with open(self.__args.file_path, \"r\") as", "it's line # number if \"line_number\" not in amember: amember[\"line_number\"]", 
"i: return; except ValueError: # Not found a semicolon, just", "prefix = \"operator\" if not name.startswith(prefix): return re.escape(name) # Operator", "1 if len(stack) <= 0: content = content[first_i:last_i] founded =", "Avoid checking member variable inside function body. if '{' not", "import yaml import copy import six import os.path import traceback", "+ 1) except ValueError: return; try: semicolonPos = content.index(';', parameters_stop_pos", "__init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class CppFileName(dict): def", "import copy import six import os.path import traceback class CppDefine(dict):", "sys import yaml import copy import six import os.path import", "is_finding_single_comment = False while (len(stack) > 0) and (i <", "found a semicolon, just ignored. pass skipped_lines = cpp_method[\"line_number\"] +", "import six import os.path import traceback class CppDefine(dict): def __init__(self):", "all other searching if c == \"\\n\": is_finding_single_comment = False", "not None: break self._validate_codes_of_cpp_method(cpp_object) if cpp_object[\"name\"] == \"main\": break if", "\"__main__\": # Execute only if run as a script main()", "False)) and (not amethod.get(\"destructor\", False))): try: self._validate_name(amethod, class_method_re) except SyntaxError:", "> 0) and (i < len(content)): c = content[i] if", "( filename, cpp_object[\"line_number\"], cpp_object_name, name_re, error_message)) def _get_class_realname(self, class_name): return", "= self._get_config(name_re)[\"error\"] if len(error_message) > 0: error_message = \"%s %s\"", "if \"struct\" in cpp_object[\"declaration_method\"]: class_re = \"struct\" class_method_re = \"struct_method\"", "\"class_method\", \"class_variant\": \"variant\", \"struct_variant\": \"class_variant\", \"typedef\": \"class\", \"struct\": \"class\", \"enum\":", "self._validate_name(cpp_object, class_re) for amethod in cpp_object.get_all_methods(): matched = 
re.match(r\".*typedef\\W[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", amethod[\"debug\"])", "return; try: semicolonPos = content.index(';', parameters_stop_pos + 1) if semicolonPos", "is_finding_block_comment = True elif is_finding_single_comment: # If finding single comment,", "class CppFileName(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1", "__name__ == \"__main__\": # Execute only if run as a", "\"filename\": \"_base_\", # Special config use to define filename rule", "a = Application() sys.exit(a.exec_()) if __name__ == \"__main__\": # Execute", "matched = re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"]) if matched is not None: cpp_object_name", "# Verify Typdef Names for cpp_object in parsed_info.typedefs: self._validate_cpp_object(cpp_object) except", "elif cpp_object_type == CppHeaderParser.CppClass: if \"struct\" in cpp_object[\"declaration_method\"]: class_re =", "problem. print(str(e)) return 0 return 0 def main(): a =", "{ \"re\":\"[a-zA-Z0-9_]+\", \"error\": \"\", } self.__config[\"_base_\"].update(old_base) def parse_define(self, adefine): matched", "if len(error_message) > 0: error_message = \"%s %s\" % (", "in founded: avariant = dict() avariant[\"name\"] = aname avariant[\"line_number\"] =", "in cpp_object[\"parameters\"]: self._validate_name(aparameter, \"define_function_argument\") elif cpp_object_type == CppHeaderParser.CppClass: if \"struct\"", "self._validate_cpp_object(cpp_object) # Verify Typdef Names for cpp_object in parsed_info.typedefs: self._validate_cpp_object(cpp_object)", "len(splitted) > 1: cpp_object_name = splitted[-1] if '...' 
in cpp_object_name:", "is None) or (len(cpp_object[\"class\"]) <= 0): if \">\" in cpp_object[\"name\"]:", "i = content.index('{', parameters_stop_pos + 1) except ValueError: return; try:", "to define filename rule \"argument\": \"variant\", \"static_variant\": \"variant\", \"global_variant\": \"variant\",", "cpp_method[\"line_number\"] - 1 # Extract cpp method codes rest_lines =", "filename rule \"argument\": \"variant\", \"static_variant\": \"variant\", \"global_variant\": \"variant\", \"function_argument\": \"argument\",", "but we should pass it, this # is the CppHeaderParser's", "if isinstance(an_argument, six.string_types): return an_argument if len(an_argument[\"name\"]) > 0: return", "elif cpp_object_type == CppNamespace: self._validate_name(cpp_object, \"namespace\") elif cpp_object_type == CppFileName:", "in amember) and (amember[\"type\"] is not None): internal_predeclares = [\"class\",", "content[i] if is_finding_block_comment: # If finding block comment, then skip", "cpp_object in parsed_info.variables: # Avoid checking member variable inside function", "content) for aname in founded: avariant = dict() avariant[\"name\"] =", "else: class_re = \"class\" class_method_re = \"class_method\" class_method_argument_re = \"class_method_argument\"", "adefine): matched = re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine) name = matched.group(1) parameters =", "\"struct\" class_method_re = \"struct_method\" class_method_argument_re = \"struct_method_argument\" class_variant_re = \"struct_variant\"", "%s\" % ( ' '.join([rule_name.capitalize() for rule_name in name_re.split(\"_\")]), error_message)", "(\"debug\" in cpp_object): matched = re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"]) if matched is", "matched.group(1) parameters = [] if matched.group(2) is not None: parameter_names", "with normal \"function\" style matched = re.match(r\"[^\\(]*\\([^\\)]*\\W(\\w+)\\W.*\\).*\", an_argument[\"type\"]) if matched", "== CppHeaderParser.CppUnion: 
self._validate_name(cpp_object, \"union\") elif cpp_object_type == CppNamespace: self._validate_name(cpp_object, \"namespace\")", "searching if (c == \"*\") and (content[i + 1] ==", "None): internal_predeclares = [\"class\", \"struct\", \"union\"] if amember[\"type\"] in internal_predeclares:", "stack.append(i) elif c == \"}\": last_i = i del stack[len(stack)", "that implemented outside of # template class. It's already be", "= i del stack[len(stack) - 1] i += 1 if", "finding block comment, then skip all other searching if (c", "dict() cpp_object[\"name\"] = cpp_object_name cpp_object[\"line_number\"] = -1 elif \"name\" in", "raise SyntaxError(\"%s:%s:error: Name '%s' isn't matched with rule : %s!", "Names for cpp_object in parsed_info.enums: self._validate_cpp_object(cpp_object) # Verify Variable Names", ": \"struct RArraySize <T ( & ) [ N ]>", "return False return True def _get_cpp_method_re(self, name): prefix = \"operator\"", "import os.path import traceback class CppDefine(dict): def __init__(self): self[\"name\"] =", "with open(self.__args.file_path, \"r\") as source_file: # For later parse by", "+ 1] == \"/\"): is_finding_block_comment = False elif (c ==", "1 except CppHeaderParser.CppHeaderParser.CppParseError as e: # CppHeaderParser can't parse this", "check it . return if len(cpp_object_name) <= 0: # Does", "re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"]) is not None: break self._validate_codes_of_cpp_method(cpp_object) if cpp_object[\"name\"] ==", "will the same with class name break self._validate_name(cpp_object, \"class_method\") break", "member variable inside function body. if '{' not in cpp_object['type']:", "= cpp_method[\"line_number\"] + content.count(\"\\n\", 0, i) - 2 stack.append(i) i", "if len(splitted) > 1: cpp_object_name = splitted[-1] if '...' 
in", "None: cpp_object[\"class\"] = matched.group(1) cpp_object[\"name\"] = matched.group(2) self._validate_name(cpp_object, \"class_method\") elif", "\"define\") else: # Function Liked Define Name self._validate_name(cpp_object, \"define_function\") for", "False except SyntaxError: pass if is_need_reraise: raise else: an_object[\"name\"] =", "name_re.split(\"_\")]), error_message) if self.__args.debug: traceback.print_stack() raise SyntaxError(\"%s:%s:error: Name '%s' isn't", "False except SyntaxError: pass if is_need_reraise: raise for aparameter in", "try: self._validate_name(an_object, class_method_re) except SyntaxError: is_need_reraise = True try: self._validate_name(amethod,", "content = '\\n'.join(rest_lines) code_lines = [] name_re = self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos", "self._validate_name(cpp_object, \"union\") elif cpp_object_type == CppNamespace: self._validate_name(cpp_object, \"namespace\") elif cpp_object_type", "cpp_object = CppNamespace() cpp_object[\"name\"] = namespace self._validate_cpp_object(cpp_object) # Verify Typdef", "__init__(self): self[\"name\"] = None self[\"parameters\"] = [] self[\"line_number\"] = -1", "0: return False return True def _get_cpp_method_re(self, name): prefix =", "(len(cpp_object[\"class\"]) <= 0): if \">\" in cpp_object[\"name\"]: regex = r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\"", "# \"void\"), it maybe macro invokes. 
# FIXME: We just", "CppHeaderParser.CppHeader(self.__args.file_path) # Verify File Names filename = os.path.basename(self.__args.file_path) cpp_object =", "skipped_lines = cpp_method[\"line_number\"] + content.count(\"\\n\", 0, i) - 2 stack.append(i)", "% ''.join(chars) def _validate_codes_of_cpp_method(self, cpp_method): start_line_index = cpp_method[\"line_number\"] - 1", "Names for cpp_object in parsed_info.structs_order: self._validate_cpp_object(cpp_object) # Verify Enum Names", "class CppDefineParameter(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1", "= [] if matched.group(2) is not None: parameter_names = matched.group(2).split(',')", "is_finding_single_comment = False elif (c == \"/\") and (content[i +", "Names for cpp_object in parsed_info.functions: self._validate_cpp_object(cpp_object) # Verify Class Names", "= aname avariant[\"line_number\"] = cpp_method[\"line_number\"] self._validate_name(avariant, \"variant\") def _validate_name(self, cpp_object,", "CppDefineParameter(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class", "is not None: break self._validate_codes_of_cpp_method(cpp_object) if cpp_object[\"name\"] == \"main\": break", "\"struct\": \"class\", \"enum\": \"class\", \"enum_value\": \"define\", \"union\": \"struct\", } my_config", "None: cpp_object_name = matched.group(1) else: return # Parse union like", "in parsed_info.variables: # Avoid checking member variable inside function body.", "CppFileName() cpp_object[\"name\"] = filename self._validate_cpp_object(cpp_object) # Verify Define Names for", "define_text in parsed_info.defines: self._validate_cpp_object(self.parse_define(define_text)) # Verify Function Names for cpp_object", "None self[\"line_number\"] = -1 class Application(object): def __init__(self): description='''A styler", "CppDefine(dict): def __init__(self): self[\"name\"] = None self[\"parameters\"] = [] self[\"line_number\"]", "Normal Define Name 
self._validate_name(cpp_object, \"define\") else: # Function Liked Define", "in cpp_object[\"structs\"][access_specifier]: self._validate_cpp_object(amember) for amember in cpp_object[\"enums\"][access_specifier]: self._validate_cpp_object(amember) elif cpp_object_type", "+= 1 if len(stack) <= 0: content = content[first_i:last_i] founded", "it, this # is the CppHeaderParser's problem. print(str(e)) return 0", "self[\"name\"] = None self[\"line_number\"] = -1 class Application(object): def __init__(self):", "result def _is_special_method(self, amethod): if isinstance(amethod, six.string_types): amethod_name = amethod", "# template class. It's already be parsed when parsing #", "\"/\") and (content[i + 1] == \"*\"): is_finding_block_comment = True", "in cpp_object[\"properties\"][access_specifier]: is_skip_validate = False if (\"type\" in amember) and", "self[\"name\"] = None self[\"parameters\"] = [] self[\"line_number\"] = -1 class", "filename = os.path.basename(self.__args.file_path) error_message = self._get_config(name_re)[\"error\"] if len(error_message) > 0:", "1) except ValueError: return; try: semicolonPos = content.index(';', parameters_stop_pos +", "file path\") parser.add_argument(\"-d\", \"--debug\", action='store_true', help=\"Print trace stack\") parser.add_argument(\"file_path\", help=\"Source", "\"class_method_argument\" class_variant_re = \"class_variant\" self._validate_name(cpp_object, class_re) for amethod in cpp_object.get_all_methods():", "\"main\": break if self._is_special_method(cpp_object): break if (cpp_object[\"class\"] is None) or", "parsed_info.typedefs: self._validate_cpp_object(cpp_object) except SyntaxError as e: print(str(e)) return 1 except", "chars = [] for achar in name[len(prefix):]: chars.append(\"\\\\s*\") if achar.isalnum():", "elif cpp_object_type == CppHeaderParser.CppMethod: # Exclude \"main\" function while parsing", "code''' parser = argparse.ArgumentParser(description=description) parser.add_argument(\"-c\", 
\"--config\", help=\"Configuration file path (In", "variable inside function body. if '{' not in cpp_object['type']: self._validate_cpp_object(cpp_object)", "with class name break self._validate_name(cpp_object, \"class_method\") break elif cpp_object_type ==", "variable that implemented outside of # template class. It's already", "is_skip_validate = False if (\"type\" in amember) and (amember[\"type\"] is", "= \"class_method\" class_method_argument_re = \"class_method_argument\" class_variant_re = \"class_variant\" self._validate_name(cpp_object, class_re)", "/ Destructor will the same with class name break self._validate_name(cpp_object,", "break self._validate_name(cpp_object, \"class_method\") break elif cpp_object_type == CppHeaderParser.CppUnion: self._validate_name(cpp_object, \"union\")", "of # template class. It's already be parsed when parsing", "Names filename = os.path.basename(self.__args.file_path) cpp_object = CppFileName() cpp_object[\"name\"] = filename", "use to define filename rule \"argument\": \"variant\", \"static_variant\": \"variant\", \"global_variant\":", "= { \"class\": \"_base_\", \"function\": \"_base_\", \"variant\": \"_base_\", \"namespace\": \"_base_\",", "= aparameter[\"line_number\"] if (aparameter[\"type\"].endswith(\"::*\") and (\")\" in aparameter[\"name\"])): an_object[\"name\"] =", "cpp_object[\"name\"] = matched.group(2) self._validate_name(cpp_object, \"class_method\") elif len(cpp_object[\"returns\"]) > 0: #", "True if not is_skip_validate: if amember[\"static\"]: self._validate_name(amember, \"static_variant\") else: self._validate_name(amember,", "yaml import copy import six import os.path import traceback class", "amethod[\"debug\"]) if matched is None: self._validate_codes_of_cpp_method(amethod) if not self._is_special_method(amethod): if", "cpp_object_type == CppHeaderParser.CppVariable: if cpp_object[\"type\"] != \"return\": if cpp_object[\"static\"]: self._validate_name(cpp_object,", "base_name = override_table[name] 
my_config.update(self._get_config(base_name)) if name in self.__config: my_config.update(self.__config[name]) return", "# Verify Function Names for cpp_object in parsed_info.functions: self._validate_cpp_object(cpp_object) #", "re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name) if len(founded) <= 0: if re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"]) is", "cpp_object_type == CppHeaderParser.CppClass: if \"struct\" in cpp_object[\"declaration_method\"]: class_re = \"struct\"", "os.path import traceback class CppDefine(dict): def __init__(self): self[\"name\"] = None", "return; except ValueError: # Not found a semicolon, just ignored.", "\"struct\") elif cpp_object_type == CppHeaderParser.CppEnum: self._validate_name(cpp_object, \"enum\") line_number = -1", "valid name, we must not check it . return matched", "Enum Names for cpp_object in parsed_info.enums: self._validate_cpp_object(cpp_object) # Verify Variable", "# Code Snippets: static RSignal<void(int)> sReceived; if \"<\" not in", "if is_need_reraise: raise for aparameter in amethod[\"parameters\"]: an_object = dict()", "line_number = -1 if \"line_number\" in cpp_object: line_number = cpp_object[\"line_number\"]", "method codes rest_lines = self._source_lines[start_line_index:] content = '\\n'.join(rest_lines) code_lines =", "\"global_variant\": \"variant\", \"function_argument\": \"argument\", \"class_method_argument\": \"function_argument\", \"struct_method_argument\": \"class_method_argument\", \"define_function_argument\": \"function_argument\",", "import sys import yaml import copy import six import os.path", "== CppNamespace: self._validate_name(cpp_object, \"namespace\") elif cpp_object_type == CppFileName: self._validate_name(cpp_object, \"filename\")", "skip all other searching if c == \"\\n\": is_finding_single_comment =", "access_specifier in CppHeaderParser.supportedAccessSpecifier: for amember in cpp_object[\"properties\"][access_specifier]: is_skip_validate = False", 
"is_need_reraise: raise else: an_object[\"name\"] = self._get_argument_name(aparameter) self._validate_name(an_object, class_method_argument_re) else: self._validate_name(", "conventions of source code''' parser = argparse.ArgumentParser(description=description) parser.add_argument(\"-c\", \"--config\", help=\"Configuration", "cpp_object[\"declaration_method\"]: class_re = \"struct\" class_method_re = \"struct_method\" class_method_argument_re = \"struct_method_argument\"", "(content[i + 1] == \"*\"): is_finding_block_comment = True elif is_finding_single_comment:", "re.match(r\"(\\w+).*\", aparameter[\"name\"]).group(1) try: self._validate_name(an_object, class_method_re) except SyntaxError: is_need_reraise = True", "just ignored this situation: # Code Snippets: static RSignal<void(int)> sReceived;", "return matched = re.match(self._get_config(name_re)[\"re\"], cpp_object_name) if matched is None: filename", "achar.isalnum(): chars.append(achar) else: chars.append(\"\\\\\") chars.append(achar) return \"operator%s\" % ''.join(chars) def", "= amethod else: amethod_name = amethod[\"name\"] founded = re.findall(r\"(?:^|[^\\w]+)operator[^\\w]+\", amethod_name)", "if len(an_argument[\"name\"]) > 0: return an_argument[\"name\"] # If it's a", "default it to input file # path if self.__args.output is", "(content[i + 1] == \"/\"): is_finding_block_comment = False elif (c", "_get_config(self, name): override_table = { \"class\": \"_base_\", \"function\": \"_base_\", \"variant\":", "by _validate_codes_of_cpp_method() self._source_lines = source_file.readlines() parsed_info = CppHeaderParser.CppHeader(self.__args.file_path) # Verify", "return my_config def _is_valid_variable(self, cpp_variable): if cpp_variable[\"type\"] == \"return\": return", "CppHeaderParser.supportedAccessSpecifier: for amember in cpp_object[\"properties\"][access_specifier]: is_skip_validate = False if (\"type\"", "len(cpp_object[\"returns\"]) > 0: # If a function does not have", "\"union\") elif 
cpp_object_type == CppNamespace: self._validate_name(cpp_object, \"namespace\") elif cpp_object_type ==", "\"union\"]: if not cpp_object[\"type\"].endswith(\"::\"): # Don't parse variable that implemented", "\"_base_\", \"variant\": \"_base_\", \"namespace\": \"_base_\", \"define\": \"_base_\", \"filename\": \"_base_\", #", "re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine) name = matched.group(1) parameters = [] if matched.group(2)", "Names for cpp_object in parsed_info.variables: # Avoid checking member variable", "re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"]) is not None: return True return False return", "self[\"line_number\"] = -1 class CppDefineParameter(dict): def __init__(self): self[\"name\"] = None", "and (not amethod.get(\"constructor\", False)) and (not amethod.get(\"destructor\", False))): try: self._validate_name(amethod,", "amethod[\"parameters\"]: an_object = dict() an_object[\"line_number\"] = aparameter[\"line_number\"] if (aparameter[\"type\"].endswith(\"::*\") and", "-1 class Application(object): def __init__(self): description='''A styler just target to", "= matched.group(1) cpp_object[\"name\"] = matched.group(2) self._validate_name(cpp_object, \"class_method\") elif len(cpp_object[\"returns\"]) >", "founded: avariant = dict() avariant[\"name\"] = aname avariant[\"line_number\"] = cpp_method[\"line_number\"]", "chars.append(achar) else: chars.append(\"\\\\\") chars.append(achar) return \"operator%s\" % ''.join(chars) def _validate_codes_of_cpp_method(self,", "styler just target to naming conventions of source code''' parser", "internal_predeclares: is_skip_validate = True if not is_skip_validate: if amember[\"static\"]: self._validate_name(amember,", "<= 0: if re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"]) is not None: return True", "1] == \"/\"): is_finding_block_comment = False elif (c == \"/\")", "return if len(cpp_object_name) <= 0: # Does not have valid", "Application() sys.exit(a.exec_()) if 
__name__ == \"__main__\": # Execute only if", "def _get_argument_name(self, an_argument): if isinstance(an_argument, six.string_types): return an_argument if len(an_argument[\"name\"])", "semicolonPos = content.index(';', parameters_stop_pos + 1) if semicolonPos <= i:", "if (c == \"*\") and (content[i + 1] == \"/\"):", "\"void\"), it maybe macro invokes. # FIXME: We just ignored", "name::function\" style matched = re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"]) if matched is None:", "a functor?? with \"class name::function\" style matched = re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"])", "override_table[name] my_config.update(self._get_config(base_name)) if name in self.__config: my_config.update(self.__config[name]) return my_config def", "= 0 is_finding_block_comment = False is_finding_single_comment = False while (len(stack)", "is not None: parameter_names = matched.group(2).split(',') for parameter_name in parameter_names:", "cpp_object[\"name\"]: regex = r\"^[^<:]*?(?:(\\w+)::)?(\\w+)\\s*<\" matched = re.search(regex, cpp_object[\"debug\"]) if matched.group(1)", "content.index(';', parameters_stop_pos + 1) if semicolonPos <= i: return; except", "file path\") self.__args = parser.parse_args() # If user does not", "names splitted = cpp_object_name.split() if len(splitted) > 1: cpp_object_name =", "i += 1 if len(stack) <= 0: content = content[first_i:last_i]", "name in override_table: base_name = override_table[name] my_config.update(self._get_config(base_name)) if name in", "== \"*\") and (content[i + 1] == \"/\"): is_finding_block_comment =", "- 1] i += 1 if len(stack) <= 0: content", "= [] self[\"line_number\"] = -1 class CppDefineParameter(dict): def __init__(self): self[\"name\"]", "amethod): if isinstance(amethod, six.string_types): amethod_name = amethod else: amethod_name =", "cpp_object[\"line_number\"] = -1 elif \"name\" in cpp_object: cpp_object_name = cpp_object[\"name\"]", "= True elif 
is_finding_single_comment: # If finding single comment, then", "cpp_object: cpp_object_name = cpp_object[\"name\"] if ('<' in cpp_object_name) and (\"debug\"", "enum value does not have it's line # number if", "= override_table[name] my_config.update(self._get_config(base_name)) if name in self.__config: my_config.update(self.__config[name]) return my_config", "cpp_object, name_re): cpp_object_name = \"\" if isinstance(cpp_object, six.string_types): cpp_object_name =", "cpp_object[\"properties\"][access_specifier]: is_skip_validate = False if (\"type\" in amember) and (amember[\"type\"]", "except SyntaxError: pass if is_need_reraise: raise else: an_object[\"name\"] = self._get_argument_name(aparameter)", "& ) [ N ]> {\" if \"debug\" in cpp_object:", "not check it . return if len(cpp_object_name) <= 0: #", "six.string_types): return an_argument if len(an_argument[\"name\"]) > 0: return an_argument[\"name\"] #", "as e: print(str(e)) return 1 except CppHeaderParser.CppHeaderParser.CppParseError as e: #", "[] self[\"line_number\"] = -1 class CppDefineParameter(dict): def __init__(self): self[\"name\"] =", "print(str(e)) return 0 return 0 def main(): a = Application()", "while parsing global function while True: # FIXME: Parse special", "this # is the CppHeaderParser's problem. 
print(str(e)) return 0 return", "line_number self._validate_name(amember, \"enum_value\") elif cpp_object_type == CppHeaderParser.CppVariable: if cpp_object[\"type\"] !=", "os.path.basename(self.__args.file_path) error_message = self._get_config(name_re)[\"error\"] if len(error_message) > 0: error_message =", "!= \"return\": if cpp_object[\"static\"]: self._validate_name(cpp_object, \"static_variant\") elif cpp_object[\"type\"] not in", "= re.match(r\"^\\w+\\s*\\(\\w*::\\*(\\w+)\\)\\(.*$\", an_argument[\"type\"]) if matched is None: # with normal", "' '.join([rule_name.capitalize() for rule_name in name_re.split(\"_\")]), error_message) if self.__args.debug: traceback.print_stack()", "(not amethod.get(\"destructor\", False))): try: self._validate_name(amethod, class_method_re) except SyntaxError: is_need_reraise =", "cpp_object_name = splitted[-1] if '...' in cpp_object_name: # Does not", "import CppHeaderParser import re import sys import yaml import copy", "\"operator\" if not name.startswith(prefix): return re.escape(name) # Operator methods chars", "try: i = content.index('{', parameters_stop_pos + 1) except ValueError: return;", "error_message = self._get_config(name_re)[\"error\"] if len(error_message) > 0: error_message = \"%s", "six.string_types): cpp_object_name = cpp_object cpp_object = dict() cpp_object[\"name\"] = cpp_object_name", "line # number if \"line_number\" not in amember: amember[\"line_number\"] =", "not specific output path, we default it to input file", "cpp_object_type == CppHeaderParser.CppUnion: self._validate_name(cpp_object, \"union\") elif cpp_object_type == CppNamespace: self._validate_name(cpp_object,", "'%s' isn't matched with rule : %s! 
%s\" % (", "= cpp_method[\"line_number\"] - 1 # Extract cpp method codes rest_lines", "cpp_object[\"static\"]: self._validate_name(cpp_object, \"static_variant\") elif cpp_object[\"type\"] not in [\"class\", \"struct\", \"union\"]:", "avariant[\"name\"] = aname avariant[\"line_number\"] = cpp_method[\"line_number\"] self._validate_name(avariant, \"variant\") def _validate_name(self,", "adefine) name = matched.group(1) parameters = [] if matched.group(2) is", "N ]> {\" if \"debug\" in cpp_object: if re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"])", "for cpp_object in parsed_info.variables: # Avoid checking member variable inside", "= False except SyntaxError: pass if is_need_reraise: raise else: an_object[\"name\"]", "cpp_object[\"line_number\"] for amember in cpp_object[\"values\"]: # Use parent line number", "self._validate_codes_of_cpp_method(amethod) if not self._is_special_method(amethod): if ((amethod[\"name\"] != self._get_class_realname(cpp_object[\"name\"])) and (not", "None: filename = os.path.basename(self.__args.file_path) error_message = self._get_config(name_re)[\"error\"] if len(error_message) >", "sys.exit(a.exec_()) if __name__ == \"__main__\": # Execute only if run", "avariant = dict() avariant[\"name\"] = aname avariant[\"line_number\"] = cpp_method[\"line_number\"] self._validate_name(avariant,", "CppNamespace() cpp_object[\"name\"] = namespace self._validate_cpp_object(cpp_object) # Verify Typdef Names for", "inside function body. 
if '{' not in cpp_object['type']: self._validate_cpp_object(cpp_object) for", "\"%s %s\" % ( ' '.join([rule_name.capitalize() for rule_name in name_re.split(\"_\")]),", "False while (len(stack) > 0) and (i < len(content)): c", "Function Liked Define Name self._validate_name(cpp_object, \"define_function\") for aparameter in cpp_object[\"parameters\"]:", "\"--config\", help=\"Configuration file path (In YAML format)\", required=True) parser.add_argument(\"-o\", \"--output\",", "== \"{\": stack.append(i) elif c == \"}\": last_i = i", "or (len(cpp_object[\"class\"]) <= 0): if \">\" in cpp_object[\"name\"]: regex =", "matched.group(2).split(',') for parameter_name in parameter_names: aparameter = CppDefineParameter() aparameter[\"name\"] =", "Parse union like names splitted = cpp_object_name.split() if len(splitted) >", "\"argument\", \"class_method_argument\": \"function_argument\", \"struct_method_argument\": \"class_method_argument\", \"define_function_argument\": \"function_argument\", \"define_function\": \"function\", \"class_method\":", "elif len(cpp_object[\"returns\"]) > 0: # If a function does not", "\"typedef\": \"class\", \"struct\": \"class\", \"enum\": \"class\", \"enum_value\": \"define\", \"union\": \"struct\",", "[ N ]> {\" if \"debug\" in cpp_object: if re.match(r\".*\\>\\s*{$\",", "class_method_re = \"class_method\" class_method_argument_re = \"class_method_argument\" class_variant_re = \"class_variant\" self._validate_name(cpp_object,", "== \"}\": last_i = i del stack[len(stack) - 1] i", "Function Names for cpp_object in parsed_info.functions: self._validate_cpp_object(cpp_object) # Verify Class", "= self.__config[\"_base_\"] self.__config[\"_base_\"] = { \"re\":\"[a-zA-Z0-9_]+\", \"error\": \"\", } self.__config[\"_base_\"].update(old_base)", "if re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"]) is not None: break self._validate_codes_of_cpp_method(cpp_object) if cpp_object[\"name\"]", "cpp_object_name) and (\"debug\" in cpp_object): matched = 
re.match(r\".*?(\\w+)\\W+$\", cpp_object[\"debug\"]) if", "we must not check it . return matched = re.match(self._get_config(name_re)[\"re\"],", "not cpp_object[\"type\"].endswith(\"::\"): # Don't parse variable that implemented outside of", "0 return 0 def main(): a = Application() sys.exit(a.exec_()) if", "- 1 # Extract cpp method codes rest_lines = self._source_lines[start_line_index:]", "\"\\n\": is_finding_single_comment = False elif (c == \"/\") and (content[i", "if len(stack) <= 0: content = content[first_i:last_i] founded = re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\",", "re.escape(name) # Operator methods chars = [] for achar in", "# path if self.__args.output is None: self.__args.output = self.__args.file_path self.__config", "re.search(name_re, content).span()[0] parameters_start_pos = content.index('(', name_start_pos) parameters_stop_pos = content.index(')', parameters_start_pos)", "\"--output\", help=\"Output file path\") parser.add_argument(\"-d\", \"--debug\", action='store_true', help=\"Print trace stack\")", "except CppHeaderParser.CppHeaderParser.CppParseError as e: # CppHeaderParser can't parse this file,", "CppNamespace(dict): def __init__(self): self[\"name\"] = None self[\"line_number\"] = -1 class", "elif \"name\" in cpp_object: cpp_object_name = cpp_object[\"name\"] if ('<' in", "self.__config[\"_base_\"].update(old_base) def parse_define(self, adefine): matched = re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine) name =", "break if self._get_class_realname(cpp_object[\"class\"]) == cpp_object[\"name\"]: # Constructor / Destructor will", "Constructor / Destructor will the same with class name break", "= \"%s %s\" % ( ' '.join([rule_name.capitalize() for rule_name in", "result = CppDefine() result[\"name\"] = name result[\"parameters\"] = parameters return", "self._validate_name(cpp_object, \"define_function\") for aparameter in cpp_object[\"parameters\"]: self._validate_name(aparameter, \"define_function_argument\") elif 
cpp_object_type", "\"class\" class_method_re = \"class_method\" class_method_argument_re = \"class_method_argument\" class_variant_re = \"class_variant\"", "self._validate_cpp_object(cpp_object) # Verify Struct Names for cpp_object in parsed_info.structs_order: self._validate_cpp_object(cpp_object)", "copy import six import os.path import traceback class CppDefine(dict): def", "exec_(self): try: with open(self.__args.file_path, \"r\") as source_file: # For later", "\"{\": stack.append(i) elif c == \"}\": last_i = i del", "founded = re.findall(r\"\\w+\\W+(\\w+)\\s*=[^=]\", content) for aname in founded: avariant =", "i) - 2 stack.append(i) i += 1 first_i = i", "cpp_object[\"type\"] != \"return\": if cpp_object[\"static\"]: self._validate_name(cpp_object, \"static_variant\") elif cpp_object[\"type\"] not", "else: return # Parse union like names splitted = cpp_object_name.split()", "\"union\": \"struct\", } my_config = dict() if name in override_table:", "= [] name_re = self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos = re.search(name_re, content).span()[0] parameters_start_pos", "== \"__main__\": # Execute only if run as a script", "is_finding_block_comment = False is_finding_single_comment = False while (len(stack) > 0)", "in internal_predeclares: is_skip_validate = True if not is_skip_validate: if amember[\"static\"]:", "if is_finding_block_comment: # If finding block comment, then skip all", "\"debug\" in cpp_object: if re.match(r\".*\\>\\s*{$\", cpp_object[\"debug\"]) is not None: break", "import re import sys import yaml import copy import six", "class_method_argument_re) else: self._validate_name( {\"name\":matched.group(1), \"line_number\":amethod[\"line_number\"]}, \"typedef\") for access_specifier in CppHeaderParser.supportedAccessSpecifier:", "\"\", } self.__config[\"_base_\"].update(old_base) def parse_define(self, adefine): matched = re.match(r\"[^\\w]*(\\w+)(?:\\(([^\\)]*)\\)|\\s*).*\", adefine)", "error_message) if self.__args.debug: 
traceback.print_stack() raise SyntaxError(\"%s:%s:error: Name '%s' isn't matched", "semicolon, just ignored. pass skipped_lines = cpp_method[\"line_number\"] + content.count(\"\\n\", 0,", "1) if semicolonPos <= i: return; except ValueError: # Not", "isn't matched with rule : %s! %s\" % ( filename,", "amethod_name) if len(founded) <= 0: if re.match(r\"(?:^|.*\\W)operator\\W.*\", amethod[\"debug\"]) is not", "\"function_argument\", \"define_function\": \"function\", \"class_method\": \"function\", \"struct_method\": \"class_method\", \"class_variant\": \"variant\", \"struct_variant\":", "(not amethod.get(\"constructor\", False)) and (not amethod.get(\"destructor\", False))): try: self._validate_name(amethod, class_method_re)", "'\\n'.join(rest_lines) code_lines = [] name_re = self._get_cpp_method_re(cpp_method[\"name\"]) name_start_pos = re.search(name_re," ]
[ "headers = \"\".join( [f\"{h.key}:{h.value}\\r\\n\" for h in self._response.headers] ) print(f'headers:", "resp.Response.create_empty() # type: resp.Response self.sender = wfile def send(self): \"\"\"像浏览器发送包", "\"\"\" if self._response is None: self._response = resp.Response.create_empty() h =", "{self._response.line.code}\\r\\n\" self.sender.write(bytes(line, 'utf-8')) self.add_header(key='Content-Length', value=len(self._response.body.content)) headers = \"\".join( [f\"{h.key}:{h.value}\\r\\n\" for", "\"\".join( [f\"{h.key}:{h.value}\\r\\n\" for h in self._response.headers] ) print(f'headers: {headers}') self.sender.write(bytes(headers,", "class WsgiHandel(BaseHTTPRequestHandler): def handle(self) -> None: handle_response = SimpleHandler(self.wfile) handle_response.send()", "is None: self._response = resp.Response.create_empty() h = resp.Headers(key=key, value=value) self._response.headers.append(h)", "wfile def send(self): \"\"\"像浏览器发送包 node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样 \"\"\" line =", "print(f'headers: {headers}') self.sender.write(bytes(headers, 'utf-8')) body = f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body, 'utf-8')) def", "from . import response as resp class WsgiServer(HTTPServer): pass class", "handle_response = SimpleHandler(self.wfile) handle_response.send() class SimpleHandler: def __init__(self, wfile): self._response", "def __init__(self, wfile): self._response = resp.Response.create_empty() # type: resp.Response self.sender", "self._response = resp.Response.create_empty() # type: resp.Response self.sender = wfile def", "handle_response.send() class SimpleHandler: def __init__(self, wfile): self._response = resp.Response.create_empty() #", ". 
import response as resp class WsgiServer(HTTPServer): pass class WsgiHandel(BaseHTTPRequestHandler):", "type: resp.Response self.sender = wfile def send(self): \"\"\"像浏览器发送包 node: 下面分成了三次发送,因为合在发送会有", "-> t.List[resp.Headers]: \"\"\"添加请求头键值对 Args: key: 键 value: 值 Return: 存在的所有键值对信息", "self.add_header(key='Content-Length', value=len(self._response.body.content)) headers = \"\".join( [f\"{h.key}:{h.value}\\r\\n\" for h in self._response.headers]", "\"\"\" line = f\"{self._response.line.version} {self._response.line.code} {self._response.line.code}\\r\\n\" self.sender.write(bytes(line, 'utf-8')) self.add_header(key='Content-Length', value=len(self._response.body.content))", "None: handle_response = SimpleHandler(self.wfile) handle_response.send() class SimpleHandler: def __init__(self, wfile):", "f\"{self._response.line.version} {self._response.line.code} {self._response.line.code}\\r\\n\" self.sender.write(bytes(line, 'utf-8')) self.add_header(key='Content-Length', value=len(self._response.body.content)) headers = \"\".join(", "t.Any) -> t.List[resp.Headers]: \"\"\"添加请求头键值对 Args: key: 键 value: 值 Return:", "pass class WsgiHandel(BaseHTTPRequestHandler): def handle(self) -> None: handle_response = SimpleHandler(self.wfile)", "= \"\".join( [f\"{h.key}:{h.value}\\r\\n\" for h in self._response.headers] ) print(f'headers: {headers}')", "resp class WsgiServer(HTTPServer): pass class WsgiHandel(BaseHTTPRequestHandler): def handle(self) -> None:", "WsgiServer(HTTPServer): pass class WsgiHandel(BaseHTTPRequestHandler): def handle(self) -> None: handle_response =", "\"\"\"像浏览器发送包 node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样 \"\"\" line = f\"{self._response.line.version} {self._response.line.code} {self._response.line.code}\\r\\n\"", "body = f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body, 'utf-8')) def add_header(self, key: str, value:", "Return: 存在的所有键值对信息 \"\"\" if self._response is None: self._response = resp.Response.create_empty()", 
"value=len(self._response.body.content)) headers = \"\".join( [f\"{h.key}:{h.value}\\r\\n\" for h in self._response.headers] )", "h in self._response.headers] ) print(f'headers: {headers}') self.sender.write(bytes(headers, 'utf-8')) body =", "= resp.Response.create_empty() # type: resp.Response self.sender = wfile def send(self):", "-> None: handle_response = SimpleHandler(self.wfile) handle_response.send() class SimpleHandler: def __init__(self,", "WsgiHandel(BaseHTTPRequestHandler): def handle(self) -> None: handle_response = SimpleHandler(self.wfile) handle_response.send() class", "send(self): \"\"\"像浏览器发送包 node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样 \"\"\" line = f\"{self._response.line.version} {self._response.line.code}", "t from http.server import HTTPServer, BaseHTTPRequestHandler from . import response", "def send(self): \"\"\"像浏览器发送包 node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样 \"\"\" line = f\"{self._response.line.version}", "'utf-8')) self.add_header(key='Content-Length', value=len(self._response.body.content)) headers = \"\".join( [f\"{h.key}:{h.value}\\r\\n\" for h in", "line = f\"{self._response.line.version} {self._response.line.code} {self._response.line.code}\\r\\n\" self.sender.write(bytes(line, 'utf-8')) self.add_header(key='Content-Length', value=len(self._response.body.content)) headers", "= SimpleHandler(self.wfile) handle_response.send() class SimpleHandler: def __init__(self, wfile): self._response =", "from http.server import HTTPServer, BaseHTTPRequestHandler from . import response as", "key: 键 value: 值 Return: 存在的所有键值对信息 \"\"\" if self._response is", "http.server import HTTPServer, BaseHTTPRequestHandler from . 
import response as resp", "值 Return: 存在的所有键值对信息 \"\"\" if self._response is None: self._response =", "self._response is None: self._response = resp.Response.create_empty() h = resp.Headers(key=key, value=value)", "[f\"{h.key}:{h.value}\\r\\n\" for h in self._response.headers] ) print(f'headers: {headers}') self.sender.write(bytes(headers, 'utf-8'))", "SimpleHandler: def __init__(self, wfile): self._response = resp.Response.create_empty() # type: resp.Response", "键 value: 值 Return: 存在的所有键值对信息 \"\"\" if self._response is None:", "__init__(self, wfile): self._response = resp.Response.create_empty() # type: resp.Response self.sender =", "str, value: t.Any) -> t.List[resp.Headers]: \"\"\"添加请求头键值对 Args: key: 键 value:", "import HTTPServer, BaseHTTPRequestHandler from . import response as resp class", "self.sender.write(bytes(headers, 'utf-8')) body = f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body, 'utf-8')) def add_header(self, key:", "import response as resp class WsgiServer(HTTPServer): pass class WsgiHandel(BaseHTTPRequestHandler): def", "# type: resp.Response self.sender = wfile def send(self): \"\"\"像浏览器发送包 node:", "node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样 \"\"\" line = f\"{self._response.line.version} {self._response.line.code} {self._response.line.code}\\r\\n\" self.sender.write(bytes(line,", "{headers}') self.sender.write(bytes(headers, 'utf-8')) body = f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body, 'utf-8')) def add_header(self,", "Args: key: 键 value: 值 Return: 存在的所有键值对信息 \"\"\" if self._response", "add_header(self, key: str, value: t.Any) -> t.List[resp.Headers]: \"\"\"添加请求头键值对 Args: key:", "HTTPServer, BaseHTTPRequestHandler from . 
import response as resp class WsgiServer(HTTPServer):", "bug,不确定问题,暂时先这样 \"\"\" line = f\"{self._response.line.version} {self._response.line.code} {self._response.line.code}\\r\\n\" self.sender.write(bytes(line, 'utf-8')) self.add_header(key='Content-Length',", "= f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body, 'utf-8')) def add_header(self, key: str, value: t.Any)", "value: t.Any) -> t.List[resp.Headers]: \"\"\"添加请求头键值对 Args: key: 键 value: 值", "self.sender.write(bytes(line, 'utf-8')) self.add_header(key='Content-Length', value=len(self._response.body.content)) headers = \"\".join( [f\"{h.key}:{h.value}\\r\\n\" for h", "typing as t from http.server import HTTPServer, BaseHTTPRequestHandler from .", "'utf-8')) def add_header(self, key: str, value: t.Any) -> t.List[resp.Headers]: \"\"\"添加请求头键值对", "in self._response.headers] ) print(f'headers: {headers}') self.sender.write(bytes(headers, 'utf-8')) body = f\"\\r\\n{self._response.body.content}\"", "SimpleHandler(self.wfile) handle_response.send() class SimpleHandler: def __init__(self, wfile): self._response = resp.Response.create_empty()", "self._response.headers] ) print(f'headers: {headers}') self.sender.write(bytes(headers, 'utf-8')) body = f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body,", "def handle(self) -> None: handle_response = SimpleHandler(self.wfile) handle_response.send() class SimpleHandler:", "None: self._response = resp.Response.create_empty() h = resp.Headers(key=key, value=value) self._response.headers.append(h) return", "= wfile def send(self): \"\"\"像浏览器发送包 node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样 \"\"\" line", "self.sender = wfile def send(self): \"\"\"像浏览器发送包 node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样 \"\"\"", "value: 值 Return: 存在的所有键值对信息 \"\"\" if self._response is None: self._response", ") print(f'headers: {headers}') self.sender.write(bytes(headers, 'utf-8')) body = f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body, 'utf-8'))", 
"if self._response is None: self._response = resp.Response.create_empty() h = resp.Headers(key=key,", "f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body, 'utf-8')) def add_header(self, key: str, value: t.Any) ->", "'utf-8')) body = f\"\\r\\n{self._response.body.content}\" self.sender.write(bytes(body, 'utf-8')) def add_header(self, key: str,", "class WsgiServer(HTTPServer): pass class WsgiHandel(BaseHTTPRequestHandler): def handle(self) -> None: handle_response", "def add_header(self, key: str, value: t.Any) -> t.List[resp.Headers]: \"\"\"添加请求头键值对 Args:", "下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样 \"\"\" line = f\"{self._response.line.version} {self._response.line.code} {self._response.line.code}\\r\\n\" self.sender.write(bytes(line, 'utf-8'))", "{self._response.line.code} {self._response.line.code}\\r\\n\" self.sender.write(bytes(line, 'utf-8')) self.add_header(key='Content-Length', value=len(self._response.body.content)) headers = \"\".join( [f\"{h.key}:{h.value}\\r\\n\"", "= f\"{self._response.line.version} {self._response.line.code} {self._response.line.code}\\r\\n\" self.sender.write(bytes(line, 'utf-8')) self.add_header(key='Content-Length', value=len(self._response.body.content)) headers =", "as t from http.server import HTTPServer, BaseHTTPRequestHandler from . 
import", "\"\"\"添加请求头键值对 Args: key: 键 value: 值 Return: 存在的所有键值对信息 \"\"\" if", "resp.Response self.sender = wfile def send(self): \"\"\"像浏览器发送包 node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样", "class SimpleHandler: def __init__(self, wfile): self._response = resp.Response.create_empty() # type:", "key: str, value: t.Any) -> t.List[resp.Headers]: \"\"\"添加请求头键值对 Args: key: 键", "存在的所有键值对信息 \"\"\" if self._response is None: self._response = resp.Response.create_empty() h", "handle(self) -> None: handle_response = SimpleHandler(self.wfile) handle_response.send() class SimpleHandler: def", "self.sender.write(bytes(body, 'utf-8')) def add_header(self, key: str, value: t.Any) -> t.List[resp.Headers]:", "response as resp class WsgiServer(HTTPServer): pass class WsgiHandel(BaseHTTPRequestHandler): def handle(self)", "for h in self._response.headers] ) print(f'headers: {headers}') self.sender.write(bytes(headers, 'utf-8')) body", "self._response = resp.Response.create_empty() h = resp.Headers(key=key, value=value) self._response.headers.append(h) return self._response.headers", "BaseHTTPRequestHandler from . import response as resp class WsgiServer(HTTPServer): pass", "wfile): self._response = resp.Response.create_empty() # type: resp.Response self.sender = wfile", "import typing as t from http.server import HTTPServer, BaseHTTPRequestHandler from", "as resp class WsgiServer(HTTPServer): pass class WsgiHandel(BaseHTTPRequestHandler): def handle(self) ->", "t.List[resp.Headers]: \"\"\"添加请求头键值对 Args: key: 键 value: 值 Return: 存在的所有键值对信息 \"\"\"", "<gh_stars>0 import typing as t from http.server import HTTPServer, BaseHTTPRequestHandler" ]
[ "is 2 dimensional an explicit time step dimension will be", "X bsz X embed_dim # attn_weights.shape = bsz X T", "keys and values are both set to encoder output Inputs", "key, value, key_padding_mask=src_len_mask, need_weights=True ) # attn.shape = T X", "decoder_state: (bsz x decoder_hidden_state_dim) or (bsz x T x decoder_hidden_state_dim)", "# attn_weights.shape = src_len X T X bsz if squeeze:", "decoder_hidden_state_dim) source_hids: srclen x bsz x context_dim src_lengths: bsz x", ": [batch_size, d_model] \"\"\" def __init__( self, decoder_hidden_state_dim, context_dim, *,", "1 attn, attn_weights = self._fair_attn.forward( query, key, value, key_padding_mask=src_len_mask, need_weights=True", "2 or 3 dimensional\") query = query.transpose(0, 1) value =", "[sequence length, batch size, d_model] key: [sequence length, batch size,", "decoder_state.dim() == 3 & squeeze & T != 1 or", "= attn.squeeze(0) # attn.shape = squeeze(T) X bsz X embed_dim", "[batch_size, 1, seq_len] src_len_mask_int = attention_utils.create_src_lengths_mask( batch_size=batch_size, src_lengths=src_lengths ) src_len_mask", "2 & squeeze or [batch_size, 1, max_src_len] if decoder_state.dim() ==", "need_weights=True ) # attn.shape = T X bsz X embed_dim", "fair_multihead.MultiheadAttention(d_model, nheads) self.use_src_length_mask = src_length_mask def forward(self, decoder_state, source_hids, src_lengths,", "== 3 & !squeeze or [batch_size, T, max_src_len] if decoder_state.dim()", "be unsqueezed. Outputs: [batch_size, max_src_len] if decoder_state.dim() == 2 &", "decoding\" ) self._fair_attn = fair_multihead.MultiheadAttention(d_model, nheads) self.use_src_length_mask = src_length_mask def", "or not to squeeze on the time dimension. 
Even if", "lengths squeeze: Whether or not to squeeze on the time", "attn_weights.shape = src_len X T X bsz if squeeze: attn", ": [sequence length, batch size, d_model] src_lengths : [batch size]", "context_dim d_model = decoder_hidden_state_dim # for brevity assert d_model %", "encoder output Inputs init: decoder_hidden_state_dim : dimensionality of decoder hidden", "or 3 dimensional\") query = query.transpose(0, 1) value = key", "bsz x context_dim src_lengths: bsz x 1, actual sequence lengths", "unseen_mask: if True, only attend to previous sequence positions src_lengths_mask:", "VW_i^V) Similarly to the above, d_k = d_v = d_model", "T X bsz if squeeze: attn = attn.squeeze(0) # attn.shape", "squeeze(T) X bsz X embed_dim attn_weights = attn_weights.squeeze(1) # attn_weights.shape", "d_k = d_v = d_model / h In this implementation,", "bsz X T X src_len attn_weights = attn_weights.transpose(0, 2) #", "!= 1 or [batch_size, max_src_len] if decoder_state.dim() == 3 &", "batch size, d_model] key: [sequence length, batch size, d_model] value:", "self._fair_attn.forward( query, key, value, key_padding_mask=src_len_mask, need_weights=True ) # attn.shape =", "length, batch size, d_model] Output result : [batch_size, d_model] \"\"\"", "not None and self.use_src_length_mask: # [batch_size, 1, seq_len] src_len_mask_int =", "for brevity assert d_model % nheads == 0 if unseen_mask:", "d_v = d_model / h In this implementation, keys and", "# [batch_size, 1, seq_len] src_len_mask_int = attention_utils.create_src_lengths_mask( batch_size=batch_size, src_lengths=src_lengths )", "X embed_dim attn_weights = attn_weights.squeeze(1) # attn_weights.shape = src_len X", "nheads : integer # of attention heads unseen_mask: if True,", "python3 from fairseq.modules import multihead_attention as fair_multihead from pytorch_translate.attention import", "source_hids: srclen x bsz x context_dim src_lengths: bsz x 1,", "batch size, d_model] value: [sequence length, batch size, d_model] Output", "& 
squeeze & T == 1 \"\"\" batch_size = decoder_state.shape[0]", "Inputs: decoder_state: (bsz x decoder_hidden_state_dim) or (bsz x T x", "X squeeze(T) X bsz return attn, attn_weights return attn, attn_weights", "d_model] key: [sequence length, batch size, d_model] value: [sequence length,", "size] forward: query : [sequence length, batch size, d_model] key:", "1, actual sequence lengths squeeze: Whether or not to squeeze", "2) # attn_weights.shape = src_len X T X bsz if", "d_model % nheads == 0 if unseen_mask: raise NotImplementedError( \"Unseen", "= Concat(head_1,...,head_h)W^O where head_i = Attention(QW_i^Q, KW_i^K, VW_i^V) Similarly to", ": integer # of attention heads unseen_mask: if True, only", "and self.use_src_length_mask: # [batch_size, 1, seq_len] src_len_mask_int = attention_utils.create_src_lengths_mask( batch_size=batch_size,", "init: decoder_hidden_state_dim : dimensionality of decoder hidden state context_dim :", "== 3 & squeeze & T == 1 \"\"\" batch_size", "query = decoder_state.unsqueeze(1) else: raise ValueError(\"decoder state must be either", "\"\"\" Multiheaded Scaled Dot Product Attention Implements equation: MultiHead(Q, K,", "or [batch_size, max_src_len] if decoder_state.dim() == 3 & squeeze &", "step dimension will be unsqueezed. 
Outputs: [batch_size, max_src_len] if decoder_state.dim()", "embed_dim attn_weights = attn_weights.squeeze(1) # attn_weights.shape = src_len X squeeze(T)", "batch_size=batch_size, src_lengths=src_lengths ) src_len_mask = src_len_mask_int != 1 attn, attn_weights", "super().__init__(decoder_hidden_state_dim, context_dim) assert decoder_hidden_state_dim == context_dim d_model = decoder_hidden_state_dim #", "if decoder_state.dim() == 2 & !squeeze or [batch_size, T, max_src_len]", "src_length_mask=True ): super().__init__(decoder_hidden_state_dim, context_dim) assert decoder_hidden_state_dim == context_dim d_model =", "key: [sequence length, batch size, d_model] value: [sequence length, batch", "be either 2 or 3 dimensional\") query = query.transpose(0, 1)", "Whether or not to squeeze on the time dimension. Even", "elif decoder_state.dim() == 2: query = decoder_state.unsqueeze(1) else: raise ValueError(\"decoder", "/ h In this implementation, keys and values are both", "x context_dim src_lengths: bsz x 1, actual sequence lengths squeeze:", "embed_dim # attn_weights.shape = bsz X T X src_len attn_weights", "== 2: query = decoder_state.unsqueeze(1) else: raise ValueError(\"decoder state must", "d_model = decoder_hidden_state_dim # for brevity assert d_model % nheads", "query.transpose(0, 1) value = key = source_hids src_len_mask = None", "X T X bsz if squeeze: attn = attn.squeeze(0) #", "NotImplementedError( \"Unseen mask not supported with sequential decoding\" ) self._fair_attn", "== context_dim d_model = decoder_hidden_state_dim # for brevity assert d_model", "!= 1 attn, attn_weights = self._fair_attn.forward( query, key, value, key_padding_mask=src_len_mask,", "multihead_attention as fair_multihead from pytorch_translate.attention import ( BaseAttention, attention_utils, register_attention,", "integer # of attention heads unseen_mask: if True, only attend", "src_len_mask_int = attention_utils.create_src_lengths_mask( batch_size=batch_size, src_lengths=src_lengths ) 
src_len_mask = src_len_mask_int !=", "decoder_state.dim() == 3 & !squeeze or [batch_size, T, max_src_len] if", "[sequence length, batch size, d_model] src_lengths : [batch size] forward:", "decoder_hidden_state_dim # for brevity assert d_model % nheads == 0", "3: query = decoder_state elif decoder_state.dim() == 2: query =", "this implementation, keys and values are both set to encoder", "x 1, actual sequence lengths squeeze: Whether or not to", "import multihead_attention as fair_multihead from pytorch_translate.attention import ( BaseAttention, attention_utils,", "if squeeze: attn = attn.squeeze(0) # attn.shape = squeeze(T) X", "if True, mask padding based on src_lengths forward: decoder_state :", "ValueError(\"decoder state must be either 2 or 3 dimensional\") query", "# attn_weights.shape = bsz X T X src_len attn_weights =", "result : [batch_size, d_model] \"\"\" def __init__( self, decoder_hidden_state_dim, context_dim,", "2 & !squeeze or [batch_size, T, max_src_len] if decoder_state.dim() ==", "fairseq.modules import multihead_attention as fair_multihead from pytorch_translate.attention import ( BaseAttention,", "Scaled Dot Product Attention Implements equation: MultiHead(Q, K, V) =", "src_length_mask def forward(self, decoder_state, source_hids, src_lengths, squeeze=True): \"\"\" Computes MultiheadAttention", "to encoder output Inputs init: decoder_hidden_state_dim : dimensionality of decoder", "True, mask padding based on src_lengths forward: decoder_state : [batch", "forward: query : [sequence length, batch size, d_model] key: [sequence", "Concat(head_1,...,head_h)W^O where head_i = Attention(QW_i^Q, KW_i^K, VW_i^V) Similarly to the", "*, nheads=1, unseen_mask=False, src_length_mask=True ): super().__init__(decoder_hidden_state_dim, context_dim) assert decoder_hidden_state_dim ==", "= src_len X T X bsz if squeeze: attn =", "d_model] src_lengths : [batch size] forward: query : [sequence length,", "both set to encoder output Inputs init: 
decoder_hidden_state_dim : dimensionality", "of decoder hidden state context_dim : dimensionality of encoder output", "raise NotImplementedError( \"Unseen mask not supported with sequential decoding\" )", "positions src_lengths_mask: if True, mask padding based on src_lengths forward:", "d_model] Output result : [batch_size, d_model] \"\"\" def __init__( self,", "forward(self, decoder_state, source_hids, src_lengths, squeeze=True): \"\"\" Computes MultiheadAttention with respect", ": dimensionality of decoder hidden state context_dim : dimensionality of", "# attn.shape = T X bsz X embed_dim # attn_weights.shape", "assert d_model % nheads == 0 if unseen_mask: raise NotImplementedError(", "squeeze: attn = attn.squeeze(0) # attn.shape = squeeze(T) X bsz", "if decoder_state.dim() == 3 & squeeze & T != 1", "attn_weights.squeeze(1) # attn_weights.shape = src_len X squeeze(T) X bsz return", "output Inputs init: decoder_hidden_state_dim : dimensionality of decoder hidden state", "vector or a tensor Inputs: decoder_state: (bsz x decoder_hidden_state_dim) or", "2 dimensional an explicit time step dimension will be unsqueezed.", "is not None and self.use_src_length_mask: # [batch_size, 1, seq_len] src_len_mask_int", "attn_weights.shape = src_len X squeeze(T) X bsz return attn, attn_weights", "Similarly to the above, d_k = d_v = d_model /", "!squeeze or [batch_size, T, max_src_len] if decoder_state.dim() == 3 &", ") # attn.shape = T X bsz X embed_dim #", "1) value = key = source_hids src_len_mask = None if", "# of attention heads unseen_mask: if True, only attend to", ") @register_attention(\"multihead\") class MultiheadAttention(BaseAttention): \"\"\" Multiheaded Scaled Dot Product Attention", "and values are both set to encoder output Inputs init:", "<filename>pytorch_translate/attention/multihead_attention.py #!/usr/bin/env python3 from fairseq.modules import multihead_attention as fair_multihead from", "unsqueezed. 
Outputs: [batch_size, max_src_len] if decoder_state.dim() == 2 & squeeze", "src_lengths is not None and self.use_src_length_mask: # [batch_size, 1, seq_len]", "attn.shape = T X bsz X embed_dim # attn_weights.shape =", "self.use_src_length_mask = src_length_mask def forward(self, decoder_state, source_hids, src_lengths, squeeze=True): \"\"\"", "bsz X embed_dim # attn_weights.shape = bsz X T X", "bsz X embed_dim attn_weights = attn_weights.squeeze(1) # attn_weights.shape = src_len", "value: [sequence length, batch size, d_model] Output result : [batch_size,", "T == 1 \"\"\" batch_size = decoder_state.shape[0] if decoder_state.dim() ==", "dimensionality of decoder hidden state context_dim : dimensionality of encoder", "tensor Inputs: decoder_state: (bsz x decoder_hidden_state_dim) or (bsz x T", "h In this implementation, keys and values are both set", "query, key, value, key_padding_mask=src_len_mask, need_weights=True ) # attn.shape = T", "supported with sequential decoding\" ) self._fair_attn = fair_multihead.MultiheadAttention(d_model, nheads) self.use_src_length_mask", "not supported with sequential decoding\" ) self._fair_attn = fair_multihead.MultiheadAttention(d_model, nheads)", "T x decoder_hidden_state_dim) source_hids: srclen x bsz x context_dim src_lengths:", "max_src_len] if decoder_state.dim() == 3 & squeeze & T ==", "value, key_padding_mask=src_len_mask, need_weights=True ) # attn.shape = T X bsz", "= src_length_mask def forward(self, decoder_state, source_hids, src_lengths, squeeze=True): \"\"\" Computes", "value = key = source_hids src_len_mask = None if src_lengths", "attention heads unseen_mask: if True, only attend to previous sequence", "head_i = Attention(QW_i^Q, KW_i^K, VW_i^V) Similarly to the above, d_k", "based on src_lengths forward: decoder_state : [batch size, d_model] source_hids", "In this implementation, keys and values are both set to", "def __init__( self, decoder_hidden_state_dim, context_dim, *, nheads=1, unseen_mask=False, 
src_length_mask=True ):", "to previous sequence positions src_lengths_mask: if True, mask padding based", "attend to previous sequence positions src_lengths_mask: if True, mask padding", "= source_hids src_len_mask = None if src_lengths is not None", "if decoder_state.dim() is 2 dimensional an explicit time step dimension", "src_len_mask_int != 1 attn, attn_weights = self._fair_attn.forward( query, key, value,", "= self._fair_attn.forward( query, key, value, key_padding_mask=src_len_mask, need_weights=True ) # attn.shape", "where head_i = Attention(QW_i^Q, KW_i^K, VW_i^V) Similarly to the above,", "d_model] \"\"\" def __init__( self, decoder_hidden_state_dim, context_dim, *, nheads=1, unseen_mask=False,", "\"Unseen mask not supported with sequential decoding\" ) self._fair_attn =", "( BaseAttention, attention_utils, register_attention, ) @register_attention(\"multihead\") class MultiheadAttention(BaseAttention): \"\"\" Multiheaded", "1 \"\"\" batch_size = decoder_state.shape[0] if decoder_state.dim() == 3: query", "decoder_state.dim() == 2 & squeeze or [batch_size, 1, max_src_len] if", "unseen_mask: raise NotImplementedError( \"Unseen mask not supported with sequential decoding\"", "== 2 & squeeze or [batch_size, 1, max_src_len] if decoder_state.dim()", "KW_i^K, VW_i^V) Similarly to the above, d_k = d_v =", "unseen_mask=False, src_length_mask=True ): super().__init__(decoder_hidden_state_dim, context_dim) assert decoder_hidden_state_dim == context_dim d_model", "attn_weights = attn_weights.squeeze(1) # attn_weights.shape = src_len X squeeze(T) X", "pytorch_translate.attention import ( BaseAttention, attention_utils, register_attention, ) @register_attention(\"multihead\") class MultiheadAttention(BaseAttention):", "squeeze on the time dimension. 
Even if decoder_state.dim() is 2", "== 3: query = decoder_state elif decoder_state.dim() == 2: query", "1 or [batch_size, max_src_len] if decoder_state.dim() == 3 & squeeze", "previous sequence positions src_lengths_mask: if True, mask padding based on", "mask padding based on src_lengths forward: decoder_state : [batch size,", "squeeze & T == 1 \"\"\" batch_size = decoder_state.shape[0] if", "decoder_state.dim() == 3 & squeeze & T == 1 \"\"\"", "& squeeze or [batch_size, 1, max_src_len] if decoder_state.dim() == 2", "dimensional\") query = query.transpose(0, 1) value = key = source_hids", "src_lengths : [batch size] forward: query : [sequence length, batch", "register_attention, ) @register_attention(\"multihead\") class MultiheadAttention(BaseAttention): \"\"\" Multiheaded Scaled Dot Product", "= squeeze(T) X bsz X embed_dim attn_weights = attn_weights.squeeze(1) #", "src_lengths: bsz x 1, actual sequence lengths squeeze: Whether or", "= attn_weights.squeeze(1) # attn_weights.shape = src_len X squeeze(T) X bsz", "X src_len attn_weights = attn_weights.transpose(0, 2) # attn_weights.shape = src_len", "src_lengths=src_lengths ) src_len_mask = src_len_mask_int != 1 attn, attn_weights =", "T, max_src_len] if decoder_state.dim() == 3 & squeeze & T", "of attention heads unseen_mask: if True, only attend to previous", "max_src_len] if decoder_state.dim() == 3 & !squeeze or [batch_size, T,", "Product Attention Implements equation: MultiHead(Q, K, V) = Concat(head_1,...,head_h)W^O where", "explicit time step dimension will be unsqueezed. 
Outputs: [batch_size, max_src_len]", "nheads=1, unseen_mask=False, src_length_mask=True ): super().__init__(decoder_hidden_state_dim, context_dim) assert decoder_hidden_state_dim == context_dim", "as fair_multihead from pytorch_translate.attention import ( BaseAttention, attention_utils, register_attention, )", "mask not supported with sequential decoding\" ) self._fair_attn = fair_multihead.MultiheadAttention(d_model,", "== 3 & squeeze & T != 1 or [batch_size,", "= None if src_lengths is not None and self.use_src_length_mask: #", "attn_weights = attn_weights.transpose(0, 2) # attn_weights.shape = src_len X T", "[batch_size, 1, max_src_len] if decoder_state.dim() == 2 & !squeeze or", "sequence positions src_lengths_mask: if True, mask padding based on src_lengths", "if unseen_mask: raise NotImplementedError( \"Unseen mask not supported with sequential", "decoder_hidden_state_dim == context_dim d_model = decoder_hidden_state_dim # for brevity assert", "either 2 or 3 dimensional\") query = query.transpose(0, 1) value", "kwargs : nheads : integer # of attention heads unseen_mask:", "equation: MultiHead(Q, K, V) = Concat(head_1,...,head_h)W^O where head_i = Attention(QW_i^Q,", "context_dim : dimensionality of encoder output kwargs : nheads :", "decoder_state.shape[0] if decoder_state.dim() == 3: query = decoder_state elif decoder_state.dim()", "3 & squeeze & T == 1 \"\"\" batch_size =", "squeeze or [batch_size, 1, max_src_len] if decoder_state.dim() == 2 &", "[batch_size, max_src_len] if decoder_state.dim() == 2 & squeeze or [batch_size,", "squeeze & T != 1 or [batch_size, max_src_len] if decoder_state.dim()", "if decoder_state.dim() == 2 & squeeze or [batch_size, 1, max_src_len]", "# attn.shape = squeeze(T) X bsz X embed_dim attn_weights =", "either a vector or a tensor Inputs: decoder_state: (bsz x", "length, batch size, d_model] src_lengths : [batch size] forward: query", "length, batch size, d_model] key: [sequence length, batch size, d_model]", ") self._fair_attn 
= fair_multihead.MultiheadAttention(d_model, nheads) self.use_src_length_mask = src_length_mask def forward(self,", "source_hids src_len_mask = None if src_lengths is not None and", "forward: decoder_state : [batch size, d_model] source_hids : [sequence length,", "dimension. Even if decoder_state.dim() is 2 dimensional an explicit time", "must be either 2 or 3 dimensional\") query = query.transpose(0,", "size, d_model] key: [sequence length, batch size, d_model] value: [sequence", "padding based on src_lengths forward: decoder_state : [batch size, d_model]", "= key = source_hids src_len_mask = None if src_lengths is", "[sequence length, batch size, d_model] value: [sequence length, batch size,", "src_len X squeeze(T) X bsz return attn, attn_weights return attn,", "else: raise ValueError(\"decoder state must be either 2 or 3", "src_len_mask = None if src_lengths is not None and self.use_src_length_mask:", ": nheads : integer # of attention heads unseen_mask: if", "1, max_src_len] if decoder_state.dim() == 2 & !squeeze or [batch_size,", "src_len attn_weights = attn_weights.transpose(0, 2) # attn_weights.shape = src_len X", "decoder_hidden_state_dim : dimensionality of decoder hidden state context_dim : dimensionality", "= T X bsz X embed_dim # attn_weights.shape = bsz", "= src_len X squeeze(T) X bsz return attn, attn_weights return", "the time dimension. 
Even if decoder_state.dim() is 2 dimensional an", "= decoder_hidden_state_dim # for brevity assert d_model % nheads ==", "if decoder_state.dim() == 3 & squeeze & T == 1", "d_model] value: [sequence length, batch size, d_model] Output result :", "\"\"\" batch_size = decoder_state.shape[0] if decoder_state.dim() == 3: query =", "(bsz x T x decoder_hidden_state_dim) source_hids: srclen x bsz x", "3 & !squeeze or [batch_size, T, max_src_len] if decoder_state.dim() ==", "if decoder_state.dim() == 3: query = decoder_state elif decoder_state.dim() ==", "2: query = decoder_state.unsqueeze(1) else: raise ValueError(\"decoder state must be", "decoder_state elif decoder_state.dim() == 2: query = decoder_state.unsqueeze(1) else: raise", "attn.shape = squeeze(T) X bsz X embed_dim attn_weights = attn_weights.squeeze(1)", ": [sequence length, batch size, d_model] key: [sequence length, batch", "context_dim src_lengths: bsz x 1, actual sequence lengths squeeze: Whether", "key_padding_mask=src_len_mask, need_weights=True ) # attn.shape = T X bsz X", "decoder_hidden_state_dim, context_dim, *, nheads=1, unseen_mask=False, src_length_mask=True ): super().__init__(decoder_hidden_state_dim, context_dim) assert", "heads unseen_mask: if True, only attend to previous sequence positions", "== 1 \"\"\" batch_size = decoder_state.shape[0] if decoder_state.dim() == 3:", "import ( BaseAttention, attention_utils, register_attention, ) @register_attention(\"multihead\") class MultiheadAttention(BaseAttention): \"\"\"", "attn.squeeze(0) # attn.shape = squeeze(T) X bsz X embed_dim attn_weights", "to the above, d_k = d_v = d_model / h", "attn_weights = self._fair_attn.forward( query, key, value, key_padding_mask=src_len_mask, need_weights=True ) #", "\"\"\" def __init__( self, decoder_hidden_state_dim, context_dim, *, nheads=1, unseen_mask=False, src_length_mask=True", "== 2 & !squeeze or [batch_size, T, max_src_len] if decoder_state.dim()", "= decoder_state.shape[0] if decoder_state.dim() == 3: 
query = decoder_state elif", "query = decoder_state elif decoder_state.dim() == 2: query = decoder_state.unsqueeze(1)", "d_model] source_hids : [sequence length, batch size, d_model] src_lengths :", "batch_size = decoder_state.shape[0] if decoder_state.dim() == 3: query = decoder_state", "= Attention(QW_i^Q, KW_i^K, VW_i^V) Similarly to the above, d_k =", "key = source_hids src_len_mask = None if src_lengths is not", "= attention_utils.create_src_lengths_mask( batch_size=batch_size, src_lengths=src_lengths ) src_len_mask = src_len_mask_int != 1", "or a tensor Inputs: decoder_state: (bsz x decoder_hidden_state_dim) or (bsz", "or [batch_size, 1, max_src_len] if decoder_state.dim() == 2 & !squeeze", "a tensor Inputs: decoder_state: (bsz x decoder_hidden_state_dim) or (bsz x", "[batch_size, T, max_src_len] if decoder_state.dim() == 3 & squeeze &", "None and self.use_src_length_mask: # [batch_size, 1, seq_len] src_len_mask_int = attention_utils.create_src_lengths_mask(", "[batch size, d_model] source_hids : [sequence length, batch size, d_model]", "respect to either a vector or a tensor Inputs: decoder_state:", ": [batch size, d_model] source_hids : [sequence length, batch size,", "src_lengths forward: decoder_state : [batch size, d_model] source_hids : [sequence", "decoder_state : [batch size, d_model] source_hids : [sequence length, batch", "source_hids : [sequence length, batch size, d_model] src_lengths : [batch", "x decoder_hidden_state_dim) or (bsz x T x decoder_hidden_state_dim) source_hids: srclen", "or [batch_size, T, max_src_len] if decoder_state.dim() == 3 & squeeze", "1, seq_len] src_len_mask_int = attention_utils.create_src_lengths_mask( batch_size=batch_size, src_lengths=src_lengths ) src_len_mask =", "X embed_dim # attn_weights.shape = bsz X T X src_len", ") src_len_mask = src_len_mask_int != 1 attn, attn_weights = self._fair_attn.forward(", "decoder_state, source_hids, src_lengths, squeeze=True): \"\"\" Computes MultiheadAttention with respect to", 
"dimensional an explicit time step dimension will be unsqueezed. Outputs:", "of encoder output kwargs : nheads : integer # of", "decoder_state.dim() == 2: query = decoder_state.unsqueeze(1) else: raise ValueError(\"decoder state", "= d_model / h In this implementation, keys and values", "nheads) self.use_src_length_mask = src_length_mask def forward(self, decoder_state, source_hids, src_lengths, squeeze=True):", "the above, d_k = d_v = d_model / h In", "size, d_model] value: [sequence length, batch size, d_model] Output result", "__init__( self, decoder_hidden_state_dim, context_dim, *, nheads=1, unseen_mask=False, src_length_mask=True ): super().__init__(decoder_hidden_state_dim,", "src_lengths_mask: if True, mask padding based on src_lengths forward: decoder_state", "fair_multihead from pytorch_translate.attention import ( BaseAttention, attention_utils, register_attention, ) @register_attention(\"multihead\")", "class MultiheadAttention(BaseAttention): \"\"\" Multiheaded Scaled Dot Product Attention Implements equation:", "state must be either 2 or 3 dimensional\") query =", "if src_lengths is not None and self.use_src_length_mask: # [batch_size, 1,", "not to squeeze on the time dimension. 
Even if decoder_state.dim()", "values are both set to encoder output Inputs init: decoder_hidden_state_dim", "or (bsz x T x decoder_hidden_state_dim) source_hids: srclen x bsz", "X bsz X embed_dim attn_weights = attn_weights.squeeze(1) # attn_weights.shape =", "source_hids, src_lengths, squeeze=True): \"\"\" Computes MultiheadAttention with respect to either", "brevity assert d_model % nheads == 0 if unseen_mask: raise", "context_dim) assert decoder_hidden_state_dim == context_dim d_model = decoder_hidden_state_dim # for", "[batch_size, max_src_len] if decoder_state.dim() == 3 & squeeze & T", "with sequential decoding\" ) self._fair_attn = fair_multihead.MultiheadAttention(d_model, nheads) self.use_src_length_mask =", "x decoder_hidden_state_dim) source_hids: srclen x bsz x context_dim src_lengths: bsz", "== 0 if unseen_mask: raise NotImplementedError( \"Unseen mask not supported", "= bsz X T X src_len attn_weights = attn_weights.transpose(0, 2)", "decoder_hidden_state_dim) or (bsz x T x decoder_hidden_state_dim) source_hids: srclen x", "src_len X T X bsz if squeeze: attn = attn.squeeze(0)", "size, d_model] source_hids : [sequence length, batch size, d_model] src_lengths", "# attn_weights.shape = src_len X squeeze(T) X bsz return attn,", "Multiheaded Scaled Dot Product Attention Implements equation: MultiHead(Q, K, V)", "Even if decoder_state.dim() is 2 dimensional an explicit time step", "decoder_state.dim() is 2 dimensional an explicit time step dimension will", "3 dimensional\") query = query.transpose(0, 1) value = key =", "Outputs: [batch_size, max_src_len] if decoder_state.dim() == 2 & squeeze or", "3 & squeeze & T != 1 or [batch_size, max_src_len]", "& T != 1 or [batch_size, max_src_len] if decoder_state.dim() ==", "decoder hidden state context_dim : dimensionality of encoder output kwargs", "def forward(self, decoder_state, source_hids, src_lengths, squeeze=True): \"\"\" Computes MultiheadAttention with", "[batch_size, d_model] \"\"\" def __init__( self, 
decoder_hidden_state_dim, context_dim, *, nheads=1,", "max_src_len] if decoder_state.dim() == 2 & squeeze or [batch_size, 1,", "length, batch size, d_model] value: [sequence length, batch size, d_model]", "& squeeze & T != 1 or [batch_size, max_src_len] if", "actual sequence lengths squeeze: Whether or not to squeeze on", "are both set to encoder output Inputs init: decoder_hidden_state_dim :", "decoder_state.unsqueeze(1) else: raise ValueError(\"decoder state must be either 2 or", "Output result : [batch_size, d_model] \"\"\" def __init__( self, decoder_hidden_state_dim,", "src_lengths, squeeze=True): \"\"\" Computes MultiheadAttention with respect to either a", "or [batch_size, T, max_src_len] if decoder_state.dim() == 3 & !squeeze", "T, max_src_len] if decoder_state.dim() == 3 & !squeeze or [batch_size,", "if decoder_state.dim() == 3 & !squeeze or [batch_size, T, max_src_len]", "batch size, d_model] src_lengths : [batch size] forward: query :", "= d_v = d_model / h In this implementation, keys", "Attention(QW_i^Q, KW_i^K, VW_i^V) Similarly to the above, d_k = d_v", "Attention Implements equation: MultiHead(Q, K, V) = Concat(head_1,...,head_h)W^O where head_i", "X T X src_len attn_weights = attn_weights.transpose(0, 2) # attn_weights.shape", "MultiheadAttention(BaseAttention): \"\"\" Multiheaded Scaled Dot Product Attention Implements equation: MultiHead(Q,", "= src_len_mask_int != 1 attn, attn_weights = self._fair_attn.forward( query, key,", "[batch size] forward: query : [sequence length, batch size, d_model]", "max_src_len] if decoder_state.dim() == 3 & squeeze & T !=", "x bsz x context_dim src_lengths: bsz x 1, actual sequence", "% nheads == 0 if unseen_mask: raise NotImplementedError( \"Unseen mask", "d_model / h In this implementation, keys and values are", "T X src_len attn_weights = attn_weights.transpose(0, 2) # attn_weights.shape =", "seq_len] src_len_mask_int = attention_utils.create_src_lengths_mask( batch_size=batch_size, src_lengths=src_lengths ) 
src_len_mask = src_len_mask_int", "hidden state context_dim : dimensionality of encoder output kwargs :", "to squeeze on the time dimension. Even if decoder_state.dim() is", "Implements equation: MultiHead(Q, K, V) = Concat(head_1,...,head_h)W^O where head_i =", "sequence lengths squeeze: Whether or not to squeeze on the", "nheads == 0 if unseen_mask: raise NotImplementedError( \"Unseen mask not", "size, d_model] Output result : [batch_size, d_model] \"\"\" def __init__(", "attn_weights.transpose(0, 2) # attn_weights.shape = src_len X T X bsz", "): super().__init__(decoder_hidden_state_dim, context_dim) assert decoder_hidden_state_dim == context_dim d_model = decoder_hidden_state_dim", "squeeze=True): \"\"\" Computes MultiheadAttention with respect to either a vector", "Inputs init: decoder_hidden_state_dim : dimensionality of decoder hidden state context_dim", "time step dimension will be unsqueezed. Outputs: [batch_size, max_src_len] if", "attention_utils.create_src_lengths_mask( batch_size=batch_size, src_lengths=src_lengths ) src_len_mask = src_len_mask_int != 1 attn,", "= decoder_state elif decoder_state.dim() == 2: query = decoder_state.unsqueeze(1) else:", "self, decoder_hidden_state_dim, context_dim, *, nheads=1, unseen_mask=False, src_length_mask=True ): super().__init__(decoder_hidden_state_dim, context_dim)", "will be unsqueezed. 
Outputs: [batch_size, max_src_len] if decoder_state.dim() == 2", "decoder_state.dim() == 3: query = decoder_state elif decoder_state.dim() == 2:", "srclen x bsz x context_dim src_lengths: bsz x 1, actual", "= attn_weights.transpose(0, 2) # attn_weights.shape = src_len X T X", "bsz x 1, actual sequence lengths squeeze: Whether or not", ": dimensionality of encoder output kwargs : nheads : integer", "with respect to either a vector or a tensor Inputs:", "attention_utils, register_attention, ) @register_attention(\"multihead\") class MultiheadAttention(BaseAttention): \"\"\" Multiheaded Scaled Dot", "a vector or a tensor Inputs: decoder_state: (bsz x decoder_hidden_state_dim)", "output kwargs : nheads : integer # of attention heads", "[sequence length, batch size, d_model] Output result : [batch_size, d_model]", "X bsz if squeeze: attn = attn.squeeze(0) # attn.shape =", "set to encoder output Inputs init: decoder_hidden_state_dim : dimensionality of", "above, d_k = d_v = d_model / h In this", "x T x decoder_hidden_state_dim) source_hids: srclen x bsz x context_dim", "T != 1 or [batch_size, max_src_len] if decoder_state.dim() == 3", "#!/usr/bin/env python3 from fairseq.modules import multihead_attention as fair_multihead from pytorch_translate.attention", "time dimension. 
Even if decoder_state.dim() is 2 dimensional an explicit", "MultiHead(Q, K, V) = Concat(head_1,...,head_h)W^O where head_i = Attention(QW_i^Q, KW_i^K,", "query : [sequence length, batch size, d_model] key: [sequence length,", "Computes MultiheadAttention with respect to either a vector or a", "max_src_len] if decoder_state.dim() == 2 & !squeeze or [batch_size, T,", "T X bsz X embed_dim # attn_weights.shape = bsz X", "attn_weights.shape = bsz X T X src_len attn_weights = attn_weights.transpose(0,", "= decoder_state.unsqueeze(1) else: raise ValueError(\"decoder state must be either 2", "batch size, d_model] Output result : [batch_size, d_model] \"\"\" def", "[batch_size, T, max_src_len] if decoder_state.dim() == 3 & !squeeze or", "encoder output kwargs : nheads : integer # of attention", "None if src_lengths is not None and self.use_src_length_mask: # [batch_size,", "squeeze: Whether or not to squeeze on the time dimension.", "MultiheadAttention with respect to either a vector or a tensor", "(bsz x decoder_hidden_state_dim) or (bsz x T x decoder_hidden_state_dim) source_hids:", "dimensionality of encoder output kwargs : nheads : integer #", "to either a vector or a tensor Inputs: decoder_state: (bsz", "from fairseq.modules import multihead_attention as fair_multihead from pytorch_translate.attention import (", "implementation, keys and values are both set to encoder output", "K, V) = Concat(head_1,...,head_h)W^O where head_i = Attention(QW_i^Q, KW_i^K, VW_i^V)", "\"\"\" Computes MultiheadAttention with respect to either a vector or", "Dot Product Attention Implements equation: MultiHead(Q, K, V) = Concat(head_1,...,head_h)W^O", "sequential decoding\" ) self._fair_attn = fair_multihead.MultiheadAttention(d_model, nheads) self.use_src_length_mask = src_length_mask", "raise ValueError(\"decoder state must be either 2 or 3 dimensional\")", "= query.transpose(0, 1) value = key = source_hids src_len_mask =", "True, only attend to previous sequence positions 
src_lengths_mask: if True,", "dimension will be unsqueezed. Outputs: [batch_size, max_src_len] if decoder_state.dim() ==", "query = query.transpose(0, 1) value = key = source_hids src_len_mask", "src_len_mask = src_len_mask_int != 1 attn, attn_weights = self._fair_attn.forward( query,", "size, d_model] src_lengths : [batch size] forward: query : [sequence", "& T == 1 \"\"\" batch_size = decoder_state.shape[0] if decoder_state.dim()", "only attend to previous sequence positions src_lengths_mask: if True, mask", "if True, only attend to previous sequence positions src_lengths_mask: if", "self.use_src_length_mask: # [batch_size, 1, seq_len] src_len_mask_int = attention_utils.create_src_lengths_mask( batch_size=batch_size, src_lengths=src_lengths", "bsz if squeeze: attn = attn.squeeze(0) # attn.shape = squeeze(T)", "context_dim, *, nheads=1, unseen_mask=False, src_length_mask=True ): super().__init__(decoder_hidden_state_dim, context_dim) assert decoder_hidden_state_dim", "an explicit time step dimension will be unsqueezed. 
Outputs: [batch_size,", "on src_lengths forward: decoder_state : [batch size, d_model] source_hids :", "@register_attention(\"multihead\") class MultiheadAttention(BaseAttention): \"\"\" Multiheaded Scaled Dot Product Attention Implements", "self._fair_attn = fair_multihead.MultiheadAttention(d_model, nheads) self.use_src_length_mask = src_length_mask def forward(self, decoder_state,", "attn = attn.squeeze(0) # attn.shape = squeeze(T) X bsz X", "= fair_multihead.MultiheadAttention(d_model, nheads) self.use_src_length_mask = src_length_mask def forward(self, decoder_state, source_hids,", "V) = Concat(head_1,...,head_h)W^O where head_i = Attention(QW_i^Q, KW_i^K, VW_i^V) Similarly", "attn, attn_weights = self._fair_attn.forward( query, key, value, key_padding_mask=src_len_mask, need_weights=True )", "from pytorch_translate.attention import ( BaseAttention, attention_utils, register_attention, ) @register_attention(\"multihead\") class", "assert decoder_hidden_state_dim == context_dim d_model = decoder_hidden_state_dim # for brevity", "& !squeeze or [batch_size, T, max_src_len] if decoder_state.dim() == 3", "state context_dim : dimensionality of encoder output kwargs : nheads", "# for brevity assert d_model % nheads == 0 if", ": [batch size] forward: query : [sequence length, batch size,", "0 if unseen_mask: raise NotImplementedError( \"Unseen mask not supported with", "on the time dimension. Even if decoder_state.dim() is 2 dimensional", "decoder_state.dim() == 2 & !squeeze or [batch_size, T, max_src_len] if", "BaseAttention, attention_utils, register_attention, ) @register_attention(\"multihead\") class MultiheadAttention(BaseAttention): \"\"\" Multiheaded Scaled" ]
[ "from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.const", "homeassistant.helpers.config_validation as cv from homeassistant import config_entries from homeassistant.const import", "password) _LOGGER.info(\"Connected to API\") hass.data[DOMAIN] = lavviebot hass.async_add_job( hass.config_entries.async_forward_entry_setup(config_entry, \"sensor\")", "\"\"\"Set up Lavviebot integration from a config entry.\"\"\" username =", "lavviebot import LavvieBotApi import homeassistant.helpers.config_validation as cv from homeassistant import", "import homeassistant.helpers.config_validation as cv from homeassistant import config_entries from homeassistant.const", "return True async def async_setup_entry(hass, config_entry): \"\"\"Set up Lavviebot integration", "import voluptuous as vol from lavviebot import LavvieBotApi import homeassistant.helpers.config_validation", "API\") hass.data[DOMAIN] = lavviebot hass.async_add_job( hass.config_entries.async_forward_entry_setup(config_entry, \"sensor\") ) return True", "from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME ) from .const import", "= await hass.async_add_executor_job(LavvieBotApi, username, password) _LOGGER.info(\"Connected to API\") hass.data[DOMAIN] =", "def setup(hass, config): \"\"\"Setup of the component\"\"\" return True async", "as vol from lavviebot import LavvieBotApi import homeassistant.helpers.config_validation as cv", "EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.const import ( CONF_PASSWORD,", "from .const import DOMAIN _LOGGER = logging.getLogger(__name__) def setup(hass, config):", "async def async_setup_entry(hass, config_entry): \"\"\"Set up Lavviebot integration from a", "Purrsong LavvieBot S\"\"\" import asyncio import logging import voluptuous as", ") from .const import DOMAIN _LOGGER = logging.getLogger(__name__) def setup(hass,", "for Purrsong 
LavvieBot S\"\"\" import asyncio import logging import voluptuous", "homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.const import", "cv from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP from", "up Lavviebot integration from a config entry.\"\"\" username = config_entry.data.get(CONF_USERNAME)", "component\"\"\" return True async def async_setup_entry(hass, config_entry): \"\"\"Set up Lavviebot", "homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions import", "logging import voluptuous as vol from lavviebot import LavvieBotApi import", "Lavviebot integration from a config entry.\"\"\" username = config_entry.data.get(CONF_USERNAME) password", "homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME ) from .const import DOMAIN", "password = config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing the Lavviebot API\") lavviebot = await", "a config entry.\"\"\" username = config_entry.data.get(CONF_USERNAME) password = config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing", "to API\") hass.data[DOMAIN] = lavviebot hass.async_add_job( hass.config_entries.async_forward_entry_setup(config_entry, \"sensor\") ) return", "import DOMAIN _LOGGER = logging.getLogger(__name__) def setup(hass, config): \"\"\"Setup of", "from lavviebot import LavvieBotApi import homeassistant.helpers.config_validation as cv from homeassistant", "import logging import voluptuous as vol from lavviebot import LavvieBotApi", "lavviebot = await hass.async_add_executor_job(LavvieBotApi, username, password) _LOGGER.info(\"Connected to API\") hass.data[DOMAIN]", "config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions import ConfigEntryNotReady from", "S\"\"\" import asyncio import logging import voluptuous as vol from", "DOMAIN _LOGGER = 
logging.getLogger(__name__) def setup(hass, config): \"\"\"Setup of the", "= logging.getLogger(__name__) def setup(hass, config): \"\"\"Setup of the component\"\"\" return", "( CONF_PASSWORD, CONF_USERNAME ) from .const import DOMAIN _LOGGER =", "await hass.async_add_executor_job(LavvieBotApi, username, password) _LOGGER.info(\"Connected to API\") hass.data[DOMAIN] = lavviebot", "_LOGGER.info(\"Connected to API\") hass.data[DOMAIN] = lavviebot hass.async_add_job( hass.config_entries.async_forward_entry_setup(config_entry, \"sensor\") )", "homeassistant.exceptions import ConfigEntryNotReady from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME )", "as cv from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP", "config entry.\"\"\" username = config_entry.data.get(CONF_USERNAME) password = config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing the", "integration from a config entry.\"\"\" username = config_entry.data.get(CONF_USERNAME) password =", "config_entry): \"\"\"Set up Lavviebot integration from a config entry.\"\"\" username", "from a config entry.\"\"\" username = config_entry.data.get(CONF_USERNAME) password = config_entry.data.get(CONF_PASSWORD)", "= config_entry.data.get(CONF_USERNAME) password = config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing the Lavviebot API\") lavviebot", "the Lavviebot API\") lavviebot = await hass.async_add_executor_job(LavvieBotApi, username, password) _LOGGER.info(\"Connected", "\"\"\"Setup of the component\"\"\" return True async def async_setup_entry(hass, config_entry):", "Lavviebot API\") lavviebot = await hass.async_add_executor_job(LavvieBotApi, username, password) _LOGGER.info(\"Connected to", "= config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing the Lavviebot API\") lavviebot = await hass.async_add_executor_job(LavvieBotApi,", "the component\"\"\" return True async def async_setup_entry(hass, config_entry): \"\"\"Set up", 
"voluptuous as vol from lavviebot import LavvieBotApi import homeassistant.helpers.config_validation as", "CONF_USERNAME ) from .const import DOMAIN _LOGGER = logging.getLogger(__name__) def", "import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions import ConfigEntryNotReady", "_LOGGER = logging.getLogger(__name__) def setup(hass, config): \"\"\"Setup of the component\"\"\"", "import ConfigEntryNotReady from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME ) from", "config): \"\"\"Setup of the component\"\"\" return True async def async_setup_entry(hass,", "async_setup_entry(hass, config_entry): \"\"\"Set up Lavviebot integration from a config entry.\"\"\"", "setup(hass, config): \"\"\"Setup of the component\"\"\" return True async def", "_LOGGER.info(\"Initializing the Lavviebot API\") lavviebot = await hass.async_add_executor_job(LavvieBotApi, username, password)", "import asyncio import logging import voluptuous as vol from lavviebot", "CONF_PASSWORD, CONF_USERNAME ) from .const import DOMAIN _LOGGER = logging.getLogger(__name__)", "LavvieBot S\"\"\" import asyncio import logging import voluptuous as vol", "username = config_entry.data.get(CONF_USERNAME) password = config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing the Lavviebot API\")", ".const import DOMAIN _LOGGER = logging.getLogger(__name__) def setup(hass, config): \"\"\"Setup", "vol from lavviebot import LavvieBotApi import homeassistant.helpers.config_validation as cv from", "config_entry.data.get(CONF_USERNAME) password = config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing the Lavviebot API\") lavviebot =", "asyncio import logging import voluptuous as vol from lavviebot import", "from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions", "import LavvieBotApi import homeassistant.helpers.config_validation as cv from homeassistant import 
config_entries", "entry.\"\"\" username = config_entry.data.get(CONF_USERNAME) password = config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing the Lavviebot", "True async def async_setup_entry(hass, config_entry): \"\"\"Set up Lavviebot integration from", "API\") lavviebot = await hass.async_add_executor_job(LavvieBotApi, username, password) _LOGGER.info(\"Connected to API\")", "import EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.const import (", "hass.async_add_executor_job(LavvieBotApi, username, password) _LOGGER.info(\"Connected to API\") hass.data[DOMAIN] = lavviebot hass.async_add_job(", "def async_setup_entry(hass, config_entry): \"\"\"Set up Lavviebot integration from a config", "of the component\"\"\" return True async def async_setup_entry(hass, config_entry): \"\"\"Set", "LavvieBotApi import homeassistant.helpers.config_validation as cv from homeassistant import config_entries from", "import ( CONF_PASSWORD, CONF_USERNAME ) from .const import DOMAIN _LOGGER", "logging.getLogger(__name__) def setup(hass, config): \"\"\"Setup of the component\"\"\" return True", "config_entry.data.get(CONF_PASSWORD) _LOGGER.info(\"Initializing the Lavviebot API\") lavviebot = await hass.async_add_executor_job(LavvieBotApi, username,", "username, password) _LOGGER.info(\"Connected to API\") hass.data[DOMAIN] = lavviebot hass.async_add_job( hass.config_entries.async_forward_entry_setup(config_entry,", "\"\"\"Support for Purrsong LavvieBot S\"\"\" import asyncio import logging import", "from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME", "ConfigEntryNotReady from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME ) from .const" ]
[ "canvas.data[self.x: self.x + self.height, self.y: self.y + self.width] = self.color", "side, color): self.x = x self.y = y self.side =", "values canvas.data[self.x: self.x + self.height, self.y: self.y + self.width] =", "color): self.x = x self.y = y self.side = side", "= x self.y = y self.width = width self.height =", "object\"\"\" # Changes a slice of the array with new", "self.color class Square: \"\"\"A square shape that can be drawn", "= y self.side = side self.color = color def draw(self,", "y self.side = side self.color = color def draw(self, canvas):", "self.width] = self.color class Square: \"\"\"A square shape that can", "self.y = y self.side = side self.color = color def", "= side self.color = color def draw(self, canvas): \"\"\"Draws itself", "def __init__(self, x, y, width, height, color): self.x = x", "= self.color class Square: \"\"\"A square shape that can be", "self.height = height self.color = color def draw(self, canvas): \"\"\"Draws", "\"\"\"A rectangle shape that can be drawn on a Canvas", "color def draw(self, canvas): \"\"\"Draws itself into the Canvas object\"\"\"", "color): self.x = x self.y = y self.width = width", "rectangle shape that can be drawn on a Canvas object\"\"\"", "slice of the array with new values canvas.data[self.x: self.x +", "y, width, height, color): self.x = x self.y = y", "Changes a slice of the array with new values canvas.data[self.x:", "a slice of the array with new values canvas.data[self.x: self.x", "x, y, side, color): self.x = x self.y = y", "# Changes a slice of the array with new values", "Canvas object\"\"\" def __init__(self, x, y, side, color): self.x =", "= color def draw(self, canvas): \"\"\"Draws itself into the Canvas", "self.color = color def draw(self, canvas): \"\"\"Draws itself into the", "be drawn on a Canvas object\"\"\" def __init__(self, x, y,", "x self.y = y self.width = width self.height = height", "Rectangle: \"\"\"A rectangle shape that can be drawn on a", "array with new values 
canvas.data[self.x: self.x + self.height, self.y: self.y", "width, height, color): self.x = x self.y = y self.width", "into the Canvas object\"\"\" # Changes a slice of the", "canvas): \"\"\"Draws itself into the Canvas object\"\"\" # Changes a", "the array with new values canvas.data[self.x: self.x + self.side, self.y:", "self.x = x self.y = y self.width = width self.height", "object\"\"\" def __init__(self, x, y, side, color): self.x = x", "a Canvas object\"\"\" def __init__(self, x, y, width, height, color):", "Square: \"\"\"A square shape that can be drawn on a", "drawn on a Canvas object\"\"\" def __init__(self, x, y, width,", "draw(self, canvas): \"\"\"Draws itself into the Canvas object\"\"\" # Changes", "height, color): self.x = x self.y = y self.width =", "y, side, color): self.x = x self.y = y self.side", "y self.width = width self.height = height self.color = color", "that can be drawn on a Canvas object\"\"\" def __init__(self,", "height self.color = color def draw(self, canvas): \"\"\"Draws itself into", "the array with new values canvas.data[self.x: self.x + self.height, self.y:", "def __init__(self, x, y, side, color): self.x = x self.y", "self.x + self.height, self.y: self.y + self.width] = self.color class", "self.x = x self.y = y self.side = side self.color", "on a Canvas object\"\"\" def __init__(self, x, y, side, color):", "class Rectangle: \"\"\"A rectangle shape that can be drawn on", "= y self.width = width self.height = height self.color =", "values canvas.data[self.x: self.x + self.side, self.y: self.y + self.side] =", "square shape that can be drawn on a Canvas object\"\"\"", "on a Canvas object\"\"\" def __init__(self, x, y, width, height,", "side self.color = color def draw(self, canvas): \"\"\"Draws itself into", "can be drawn on a Canvas object\"\"\" def __init__(self, x,", "with new values canvas.data[self.x: self.x + self.side, self.y: self.y +", "new values canvas.data[self.x: self.x + self.side, self.y: self.y + self.side]", "of 
the array with new values canvas.data[self.x: self.x + self.side,", "+ self.width] = self.color class Square: \"\"\"A square shape that", "__init__(self, x, y, side, color): self.x = x self.y =", "self.height, self.y: self.y + self.width] = self.color class Square: \"\"\"A", "self.side = side self.color = color def draw(self, canvas): \"\"\"Draws", "with new values canvas.data[self.x: self.x + self.height, self.y: self.y +", "x self.y = y self.side = side self.color = color", "+ self.height, self.y: self.y + self.width] = self.color class Square:", "itself into the Canvas object\"\"\" # Changes a slice of", "self.y + self.width] = self.color class Square: \"\"\"A square shape", "new values canvas.data[self.x: self.x + self.height, self.y: self.y + self.width]", "shape that can be drawn on a Canvas object\"\"\" def", "self.width = width self.height = height self.color = color def", "= x self.y = y self.side = side self.color =", "width self.height = height self.color = color def draw(self, canvas):", "def draw(self, canvas): \"\"\"Draws itself into the Canvas object\"\"\" #", "\"\"\"Draws itself into the Canvas object\"\"\" # Changes a slice", "self.y = y self.width = width self.height = height self.color", "__init__(self, x, y, width, height, color): self.x = x self.y", "= width self.height = height self.color = color def draw(self,", "x, y, width, height, color): self.x = x self.y =", "class Square: \"\"\"A square shape that can be drawn on", "a Canvas object\"\"\" def __init__(self, x, y, side, color): self.x", "of the array with new values canvas.data[self.x: self.x + self.height,", "drawn on a Canvas object\"\"\" def __init__(self, x, y, side,", "Canvas object\"\"\" def __init__(self, x, y, width, height, color): self.x", "array with new values canvas.data[self.x: self.x + self.side, self.y: self.y", "Canvas object\"\"\" # Changes a slice of the array with", "canvas.data[self.x: self.x + self.side, self.y: self.y + self.side] = self.color", "\"\"\"A square 
shape that can be drawn on a Canvas", "the Canvas object\"\"\" # Changes a slice of the array", "object\"\"\" def __init__(self, x, y, width, height, color): self.x =", "self.y: self.y + self.width] = self.color class Square: \"\"\"A square", "= height self.color = color def draw(self, canvas): \"\"\"Draws itself" ]
[ "indexing='ij') yp = 0.25 dens_1 = 2.0 dens_2 = 1.0", "dt increase':1.5, 'initial t': 0.0, 'max time': 5.0, 'save frequency':", "'numba', 'riemann':'hllc', 'reconstruction':'linear', 'limiter':'minmod', 'time stepping':'RK2', 'method':'hydro', 'lower x1 boundary':'reciprocal',", "+ vx2_per[vx2, region_2] def internal_bc(self): return None if __name__ ==", "= -vel_1 + vx1_per[vx1, region_2] V[vx2, region_2] = vel_2 +", "< yp region_2 = np.absolute(Y) > yp V[rho, region_1] =", "the internal boundary for the simulation. TODO ---- None \"\"\"", "yp = 0.25 dens_1 = 2.0 dens_2 = 1.0 pres", "region_2] def internal_bc(self): return None if __name__ == \"__main__\": main_loop(Problem())", "= 2.0 vel_1 = 0.5 vel_2 = 0.0 amp =", "if self.parameter['Dimensions'] == '2D': Y, X = np.meshgrid(g.x1, g.x2, indexing='ij')", "1)*amp region_1 = np.absolute(Y) < yp region_2 = np.absolute(Y) >", "region_1] V[vx2, region_1] = vel_2 + vx2_per[vx2, region_1] V[rho, region_2]", "'print to file':False, 'profiling': True, 'restart file':None, 'gamma':1.4, 'density unit':1.0,", "'cfl':0.3, 'initial dt':1.0e-5, 'max dt increase':1.5, 'initial t': 0.0, 'max", "t': 0.0, 'max time': 5.0, 'save frequency': 2.5e-2, 'output type':", "= 0.25 dens_1 = 2.0 dens_2 = 1.0 pres =", "+ vx1_per[vx1, region_1] V[vx2, region_1] = vel_2 + vx2_per[vx2, region_1]", "dens_2 V[prs, region_2] = pres V[vx1, region_2] = -vel_1 +", "the Kelvin-Helmholtz instability Args ---- None Methods ------- initialise Set", "None Methods ------- initialise Set all variables in each cell", "to file':False, 'profiling': True, 'restart file':None, 'gamma':1.4, 'density unit':1.0, 'length", "'lower x2 boundary':'reciprocal', 'upper x2 boundary':'reciprocal', 'lower x3 boundary':'reciprocal', 'upper", "= np.meshgrid(g.x1, g.x2, g.x3, indexing='ij') yp = 0.25 dens_1 =", "yp region_2 = np.absolute(Y) > yp V[rho, region_1] = dens_1", "def __init__(self): self.parameter = { 'Name':'<NAME> instability.', 'Dimensions':'2D', 'x1 min':-0.5,", 
"boundary':False } def initialise(self, V, g, l): if self.parameter['Dimensions'] ==", "max':0.5, 'x3 min':-0.5, 'x3 max':0.5, 'resolution x1':256, 'resolution x2':256, 'resolution", "dens_1 V[prs, region_1] = pres V[vx1, region_1] = vel_1 +", "vx2_per[vx2, region_2] def internal_bc(self): return None if __name__ == \"__main__\":", "= 0.5 vel_2 = 0.0 amp = 0.001 vx1_per =", "= np.absolute(Y) > yp V[rho, region_1] = dens_1 V[prs, region_1]", "g.x2, indexing='ij') if self.parameter['Dimensions'] == '3D': Z, Y, X =", "'internal boundary':False } def initialise(self, V, g, l): if self.parameter['Dimensions']", "= dens_2 V[prs, region_2] = pres V[vx1, region_2] = -vel_1", "V[vx2, region_2] = vel_2 + vx2_per[vx2, region_2] def internal_bc(self): return", "= (np.random.random(V.shape)*2.0 - 1)*amp vx2_per = (np.random.random(V.shape)*2.0 - 1)*amp region_1", "'x2 max':0.5, 'x3 min':-0.5, 'x3 max':0.5, 'resolution x1':256, 'resolution x2':256,", "import numpy as np from mars.settings import * class Problem:", "l): if self.parameter['Dimensions'] == '2D': Y, X = np.meshgrid(g.x1, g.x2,", "pres = 2.0 vel_1 = 0.5 vel_2 = 0.0 amp", "0.25 dens_1 = 2.0 dens_2 = 1.0 pres = 2.0", "---- None Methods ------- initialise Set all variables in each", "True, 'restart file':None, 'gamma':1.4, 'density unit':1.0, 'length unit':1.0, 'velocity unit':1.0,", "== '3D': Z, Y, X = np.meshgrid(g.x1, g.x2, g.x3, indexing='ij')", "'lower x3 boundary':'reciprocal', 'upper x3 boundary':'reciprocal', 'internal boundary':False } def", "'limiter':'minmod', 'time stepping':'RK2', 'method':'hydro', 'lower x1 boundary':'reciprocal', 'upper x1 boundary':'reciprocal',", "region_1] = dens_1 V[prs, region_1] = pres V[vx1, region_1] =", "'max time': 5.0, 'save frequency': 2.5e-2, 'output type': ['numpy'], 'output", "x3':0, 'cfl':0.3, 'initial dt':1.0e-5, 'max dt increase':1.5, 'initial t': 0.0,", "mars import main_loop import numpy as np from mars.settings import", "boundary':'reciprocal', 'upper x1 
boundary':'reciprocal', 'lower x2 boundary':'reciprocal', 'upper x2 boundary':'reciprocal',", "Y, X = np.meshgrid(g.x1, g.x2, indexing='ij') if self.parameter['Dimensions'] == '3D':", "min':-0.5, 'x1 max':0.5, 'x2 min':-0.5, 'x2 max':0.5, 'x3 min':-0.5, 'x3", "Kelvin-Helmholtz instability Args ---- None Methods ------- initialise Set all", "x2':256, 'resolution x3':0, 'cfl':0.3, 'initial dt':1.0e-5, 'max dt increase':1.5, 'initial", "vx2_per = (np.random.random(V.shape)*2.0 - 1)*amp region_1 = np.absolute(Y) < yp", "for the simulation. TODO ---- None \"\"\" def __init__(self): self.parameter", "min':-0.5, 'x2 max':0.5, 'x3 min':-0.5, 'x3 max':0.5, 'resolution x1':256, 'resolution", "True, 'print to file':False, 'profiling': True, 'restart file':None, 'gamma':1.4, 'density", "region_1] = vel_2 + vx2_per[vx2, region_1] V[rho, region_2] = dens_2", "'optimisation': 'numba', 'riemann':'hllc', 'reconstruction':'linear', 'limiter':'minmod', 'time stepping':'RK2', 'method':'hydro', 'lower x1", "0.5 vel_2 = 0.0 amp = 0.001 vx1_per = (np.random.random(V.shape)*2.0", "> yp V[rho, region_1] = dens_1 V[prs, region_1] = pres", "instability.', 'Dimensions':'2D', 'x1 min':-0.5, 'x1 max':0.5, 'x2 min':-0.5, 'x2 max':0.5,", "V[vx1, region_2] = -vel_1 + vx1_per[vx1, region_2] V[vx2, region_2] =", "X = np.meshgrid(g.x1, g.x2, g.x3, indexing='ij') yp = 0.25 dens_1", "V[rho, region_1] = dens_1 V[prs, region_1] = pres V[vx1, region_1]", "TODO ---- None \"\"\" def __init__(self): self.parameter = { 'Name':'<NAME>", "initialise(self, V, g, l): if self.parameter['Dimensions'] == '2D': Y, X", "V, g, l): if self.parameter['Dimensions'] == '2D': Y, X =", "x2 boundary':'reciprocal', 'upper x2 boundary':'reciprocal', 'lower x3 boundary':'reciprocal', 'upper x3", "boundary':'reciprocal', 'upper x2 boundary':'reciprocal', 'lower x3 boundary':'reciprocal', 'upper x3 boundary':'reciprocal',", "each cell to initialise the simulation. 
internal_bc Specify the internal", "def initialise(self, V, g, l): if self.parameter['Dimensions'] == '2D': Y,", "min':-0.5, 'x3 max':0.5, 'resolution x1':256, 'resolution x2':256, 'resolution x3':0, 'cfl':0.3,", "= vel_2 + vx2_per[vx2, region_2] def internal_bc(self): return None if", "1.0 pres = 2.0 vel_1 = 0.5 vel_2 = 0.0", "vx1_per[vx1, region_2] V[vx2, region_2] = vel_2 + vx2_per[vx2, region_2] def", "region_1] = vel_1 + vx1_per[vx1, region_1] V[vx2, region_1] = vel_2", "V[vx1, region_1] = vel_1 + vx1_per[vx1, region_1] V[vx2, region_1] =", "['numpy'], 'output primitives': True, 'print to file':False, 'profiling': True, 'restart", "region_1] = pres V[vx1, region_1] = vel_1 + vx1_per[vx1, region_1]", "= pres V[vx1, region_2] = -vel_1 + vx1_per[vx1, region_2] V[vx2,", "V[vx2, region_1] = vel_2 + vx2_per[vx2, region_1] V[rho, region_2] =", "self.parameter = { 'Name':'<NAME> instability.', 'Dimensions':'2D', 'x1 min':-0.5, 'x1 max':0.5,", "region_2] = dens_2 V[prs, region_2] = pres V[vx1, region_2] =", "internal_bc Specify the internal boundary for the simulation. TODO ----", "Specify the internal boundary for the simulation. 
TODO ---- None", "x1 boundary':'reciprocal', 'upper x1 boundary':'reciprocal', 'lower x2 boundary':'reciprocal', 'upper x2", "'Dimensions':'2D', 'x1 min':-0.5, 'x1 max':0.5, 'x2 min':-0.5, 'x2 max':0.5, 'x3", "'gamma':1.4, 'density unit':1.0, 'length unit':1.0, 'velocity unit':1.0, 'optimisation': 'numba', 'riemann':'hllc',", "from mars import main_loop import numpy as np from mars.settings", "type': ['numpy'], 'output primitives': True, 'print to file':False, 'profiling': True,", "unit':1.0, 'velocity unit':1.0, 'optimisation': 'numba', 'riemann':'hllc', 'reconstruction':'linear', 'limiter':'minmod', 'time stepping':'RK2',", "vel_2 + vx2_per[vx2, region_1] V[rho, region_2] = dens_2 V[prs, region_2]", "= vel_1 + vx1_per[vx1, region_1] V[vx2, region_1] = vel_2 +", "stepping':'RK2', 'method':'hydro', 'lower x1 boundary':'reciprocal', 'upper x1 boundary':'reciprocal', 'lower x2", "= 0.0 amp = 0.001 vx1_per = (np.random.random(V.shape)*2.0 - 1)*amp", "vx1_per[vx1, region_1] V[vx2, region_1] = vel_2 + vx2_per[vx2, region_1] V[rho,", "primitives': True, 'print to file':False, 'profiling': True, 'restart file':None, 'gamma':1.4,", "dt':1.0e-5, 'max dt increase':1.5, 'initial t': 0.0, 'max time': 5.0,", "= (np.random.random(V.shape)*2.0 - 1)*amp region_1 = np.absolute(Y) < yp region_2", "'2D': Y, X = np.meshgrid(g.x1, g.x2, indexing='ij') if self.parameter['Dimensions'] ==", "= 1.0 pres = 2.0 vel_1 = 0.5 vel_2 =", "max':0.5, 'x2 min':-0.5, 'x2 max':0.5, 'x3 min':-0.5, 'x3 max':0.5, 'resolution", "class for the Kelvin-Helmholtz instability Args ---- None Methods -------", "Y, X = np.meshgrid(g.x1, g.x2, g.x3, indexing='ij') yp = 0.25", "0.001 vx1_per = (np.random.random(V.shape)*2.0 - 1)*amp vx2_per = (np.random.random(V.shape)*2.0 -", "the simulation. 
TODO ---- None \"\"\" def __init__(self): self.parameter =", "np from mars.settings import * class Problem: \"\"\" Synopsis --------", "self.parameter['Dimensions'] == '3D': Z, Y, X = np.meshgrid(g.x1, g.x2, g.x3,", "file':False, 'profiling': True, 'restart file':None, 'gamma':1.4, 'density unit':1.0, 'length unit':1.0,", "User class for the Kelvin-Helmholtz instability Args ---- None Methods", "instability Args ---- None Methods ------- initialise Set all variables", "import * class Problem: \"\"\" Synopsis -------- User class for", "'length unit':1.0, 'velocity unit':1.0, 'optimisation': 'numba', 'riemann':'hllc', 'reconstruction':'linear', 'limiter':'minmod', 'time", "= np.absolute(Y) < yp region_2 = np.absolute(Y) > yp V[rho,", "g, l): if self.parameter['Dimensions'] == '2D': Y, X = np.meshgrid(g.x1,", "2.0 vel_1 = 0.5 vel_2 = 0.0 amp = 0.001", "pres V[vx1, region_1] = vel_1 + vx1_per[vx1, region_1] V[vx2, region_1]", "yp V[rho, region_1] = dens_1 V[prs, region_1] = pres V[vx1,", "'resolution x1':256, 'resolution x2':256, 'resolution x3':0, 'cfl':0.3, 'initial dt':1.0e-5, 'max", "unit':1.0, 'length unit':1.0, 'velocity unit':1.0, 'optimisation': 'numba', 'riemann':'hllc', 'reconstruction':'linear', 'limiter':'minmod',", "= vel_2 + vx2_per[vx2, region_1] V[rho, region_2] = dens_2 V[prs,", "X = np.meshgrid(g.x1, g.x2, indexing='ij') if self.parameter['Dimensions'] == '3D': Z,", "= 0.001 vx1_per = (np.random.random(V.shape)*2.0 - 1)*amp vx2_per = (np.random.random(V.shape)*2.0", "x1 boundary':'reciprocal', 'lower x2 boundary':'reciprocal', 'upper x2 boundary':'reciprocal', 'lower x3", "Synopsis -------- User class for the Kelvin-Helmholtz instability Args ----", "---- None \"\"\" def __init__(self): self.parameter = { 'Name':'<NAME> instability.',", "region_2 = np.absolute(Y) > yp V[rho, region_1] = dens_1 V[prs,", "'initial t': 0.0, 'max time': 5.0, 'save frequency': 2.5e-2, 'output", "class Problem: \"\"\" Synopsis -------- User class for the Kelvin-Helmholtz", 
"unit':1.0, 'optimisation': 'numba', 'riemann':'hllc', 'reconstruction':'linear', 'limiter':'minmod', 'time stepping':'RK2', 'method':'hydro', 'lower", "'lower x1 boundary':'reciprocal', 'upper x1 boundary':'reciprocal', 'lower x2 boundary':'reciprocal', 'upper", "np.meshgrid(g.x1, g.x2, g.x3, indexing='ij') yp = 0.25 dens_1 = 2.0", "self.parameter['Dimensions'] == '2D': Y, X = np.meshgrid(g.x1, g.x2, indexing='ij') if", "region_1 = np.absolute(Y) < yp region_2 = np.absolute(Y) > yp", "'resolution x3':0, 'cfl':0.3, 'initial dt':1.0e-5, 'max dt increase':1.5, 'initial t':", "mars.settings import * class Problem: \"\"\" Synopsis -------- User class", "Set all variables in each cell to initialise the simulation.", "V[rho, region_2] = dens_2 V[prs, region_2] = pres V[vx1, region_2]", "vel_1 + vx1_per[vx1, region_1] V[vx2, region_1] = vel_2 + vx2_per[vx2,", "internal boundary for the simulation. TODO ---- None \"\"\" def", "initialise Set all variables in each cell to initialise the", "region_2] V[vx2, region_2] = vel_2 + vx2_per[vx2, region_2] def internal_bc(self):", "x2 boundary':'reciprocal', 'lower x3 boundary':'reciprocal', 'upper x3 boundary':'reciprocal', 'internal boundary':False", "'upper x3 boundary':'reciprocal', 'internal boundary':False } def initialise(self, V, g,", "region_2] = vel_2 + vx2_per[vx2, region_2] def internal_bc(self): return None", "(np.random.random(V.shape)*2.0 - 1)*amp vx2_per = (np.random.random(V.shape)*2.0 - 1)*amp region_1 =", "-------- User class for the Kelvin-Helmholtz instability Args ---- None", "+ vx2_per[vx2, region_1] V[rho, region_2] = dens_2 V[prs, region_2] =", "dens_1 = 2.0 dens_2 = 1.0 pres = 2.0 vel_1", "'resolution x2':256, 'resolution x3':0, 'cfl':0.3, 'initial dt':1.0e-5, 'max dt increase':1.5,", "dens_2 = 1.0 pres = 2.0 vel_1 = 0.5 vel_2", "from mars.settings import * class Problem: \"\"\" Synopsis -------- User", "{ 'Name':'<NAME> instability.', 'Dimensions':'2D', 'x1 min':-0.5, 'x1 max':0.5, 'x2 min':-0.5,", "x3 
boundary':'reciprocal', 'internal boundary':False } def initialise(self, V, g, l):", "= dens_1 V[prs, region_1] = pres V[vx1, region_1] = vel_1", "region_2] = -vel_1 + vx1_per[vx1, region_2] V[vx2, region_2] = vel_2", "Problem: \"\"\" Synopsis -------- User class for the Kelvin-Helmholtz instability", "initialise the simulation. internal_bc Specify the internal boundary for the", "} def initialise(self, V, g, l): if self.parameter['Dimensions'] == '2D':", "* class Problem: \"\"\" Synopsis -------- User class for the", "simulation. internal_bc Specify the internal boundary for the simulation. TODO", "vel_2 = 0.0 amp = 0.001 vx1_per = (np.random.random(V.shape)*2.0 -", "vx1_per = (np.random.random(V.shape)*2.0 - 1)*amp vx2_per = (np.random.random(V.shape)*2.0 - 1)*amp", "-vel_1 + vx1_per[vx1, region_2] V[vx2, region_2] = vel_2 + vx2_per[vx2,", "'Name':'<NAME> instability.', 'Dimensions':'2D', 'x1 min':-0.5, 'x1 max':0.5, 'x2 min':-0.5, 'x2", "in each cell to initialise the simulation. internal_bc Specify the", "file':None, 'gamma':1.4, 'density unit':1.0, 'length unit':1.0, 'velocity unit':1.0, 'optimisation': 'numba',", "np.meshgrid(g.x1, g.x2, indexing='ij') if self.parameter['Dimensions'] == '3D': Z, Y, X", "g.x2, g.x3, indexing='ij') yp = 0.25 dens_1 = 2.0 dens_2", "'x1 max':0.5, 'x2 min':-0.5, 'x2 max':0.5, 'x3 min':-0.5, 'x3 max':0.5,", "== '2D': Y, X = np.meshgrid(g.x1, g.x2, indexing='ij') if self.parameter['Dimensions']", "x1':256, 'resolution x2':256, 'resolution x3':0, 'cfl':0.3, 'initial dt':1.0e-5, 'max dt", "Z, Y, X = np.meshgrid(g.x1, g.x2, g.x3, indexing='ij') yp =", "import main_loop import numpy as np from mars.settings import *", "pres V[vx1, region_2] = -vel_1 + vx1_per[vx1, region_2] V[vx2, region_2]", "all variables in each cell to initialise the simulation. 
internal_bc", "'velocity unit':1.0, 'optimisation': 'numba', 'riemann':'hllc', 'reconstruction':'linear', 'limiter':'minmod', 'time stepping':'RK2', 'method':'hydro',", "frequency': 2.5e-2, 'output type': ['numpy'], 'output primitives': True, 'print to", "the simulation. internal_bc Specify the internal boundary for the simulation.", "= np.meshgrid(g.x1, g.x2, indexing='ij') if self.parameter['Dimensions'] == '3D': Z, Y,", "np.absolute(Y) < yp region_2 = np.absolute(Y) > yp V[rho, region_1]", "main_loop import numpy as np from mars.settings import * class", "Args ---- None Methods ------- initialise Set all variables in", "__init__(self): self.parameter = { 'Name':'<NAME> instability.', 'Dimensions':'2D', 'x1 min':-0.5, 'x1", "'x1 min':-0.5, 'x1 max':0.5, 'x2 min':-0.5, 'x2 max':0.5, 'x3 min':-0.5,", "2.5e-2, 'output type': ['numpy'], 'output primitives': True, 'print to file':False,", "numpy as np from mars.settings import * class Problem: \"\"\"", "boundary for the simulation. TODO ---- None \"\"\" def __init__(self):", "'riemann':'hllc', 'reconstruction':'linear', 'limiter':'minmod', 'time stepping':'RK2', 'method':'hydro', 'lower x1 boundary':'reciprocal', 'upper", "'time stepping':'RK2', 'method':'hydro', 'lower x1 boundary':'reciprocal', 'upper x1 boundary':'reciprocal', 'lower", "'method':'hydro', 'lower x1 boundary':'reciprocal', 'upper x1 boundary':'reciprocal', 'lower x2 boundary':'reciprocal',", "time': 5.0, 'save frequency': 2.5e-2, 'output type': ['numpy'], 'output primitives':", "1)*amp vx2_per = (np.random.random(V.shape)*2.0 - 1)*amp region_1 = np.absolute(Y) <", "'output type': ['numpy'], 'output primitives': True, 'print to file':False, 'profiling':", "(np.random.random(V.shape)*2.0 - 1)*amp region_1 = np.absolute(Y) < yp region_2 =", "'upper x1 boundary':'reciprocal', 'lower x2 boundary':'reciprocal', 'upper x2 boundary':'reciprocal', 'lower", "------- initialise Set all variables in each cell to initialise", "V[prs, region_1] = pres V[vx1, 
region_1] = vel_1 + vx1_per[vx1,", "'3D': Z, Y, X = np.meshgrid(g.x1, g.x2, g.x3, indexing='ij') yp", "np.absolute(Y) > yp V[rho, region_1] = dens_1 V[prs, region_1] =", "boundary':'reciprocal', 'lower x3 boundary':'reciprocal', 'upper x3 boundary':'reciprocal', 'internal boundary':False }", "indexing='ij') if self.parameter['Dimensions'] == '3D': Z, Y, X = np.meshgrid(g.x1,", "g.x3, indexing='ij') yp = 0.25 dens_1 = 2.0 dens_2 =", "\"\"\" Synopsis -------- User class for the Kelvin-Helmholtz instability Args", "region_1] V[rho, region_2] = dens_2 V[prs, region_2] = pres V[vx1,", "variables in each cell to initialise the simulation. internal_bc Specify", "- 1)*amp vx2_per = (np.random.random(V.shape)*2.0 - 1)*amp region_1 = np.absolute(Y)", "boundary':'reciprocal', 'upper x3 boundary':'reciprocal', 'internal boundary':False } def initialise(self, V,", "vel_2 + vx2_per[vx2, region_2] def internal_bc(self): return None if __name__", "region_2] = pres V[vx1, region_2] = -vel_1 + vx1_per[vx1, region_2]", "for the Kelvin-Helmholtz instability Args ---- None Methods ------- initialise", "to initialise the simulation. 
internal_bc Specify the internal boundary for", "None \"\"\" def __init__(self): self.parameter = { 'Name':'<NAME> instability.', 'Dimensions':'2D',", "\"\"\" def __init__(self): self.parameter = { 'Name':'<NAME> instability.', 'Dimensions':'2D', 'x1", "'x3 min':-0.5, 'x3 max':0.5, 'resolution x1':256, 'resolution x2':256, 'resolution x3':0,", "boundary':'reciprocal', 'internal boundary':False } def initialise(self, V, g, l): if", "2.0 dens_2 = 1.0 pres = 2.0 vel_1 = 0.5", "- 1)*amp region_1 = np.absolute(Y) < yp region_2 = np.absolute(Y)", "'upper x2 boundary':'reciprocal', 'lower x3 boundary':'reciprocal', 'upper x3 boundary':'reciprocal', 'internal", "if self.parameter['Dimensions'] == '3D': Z, Y, X = np.meshgrid(g.x1, g.x2,", "0.0, 'max time': 5.0, 'save frequency': 2.5e-2, 'output type': ['numpy'],", "'save frequency': 2.5e-2, 'output type': ['numpy'], 'output primitives': True, 'print", "'output primitives': True, 'print to file':False, 'profiling': True, 'restart file':None,", "vel_1 = 0.5 vel_2 = 0.0 amp = 0.001 vx1_per", "'reconstruction':'linear', 'limiter':'minmod', 'time stepping':'RK2', 'method':'hydro', 'lower x1 boundary':'reciprocal', 'upper x1", "'x3 max':0.5, 'resolution x1':256, 'resolution x2':256, 'resolution x3':0, 'cfl':0.3, 'initial", "amp = 0.001 vx1_per = (np.random.random(V.shape)*2.0 - 1)*amp vx2_per =", "5.0, 'save frequency': 2.5e-2, 'output type': ['numpy'], 'output primitives': True,", "'density unit':1.0, 'length unit':1.0, 'velocity unit':1.0, 'optimisation': 'numba', 'riemann':'hllc', 'reconstruction':'linear',", "'initial dt':1.0e-5, 'max dt increase':1.5, 'initial t': 0.0, 'max time':", "increase':1.5, 'initial t': 0.0, 'max time': 5.0, 'save frequency': 2.5e-2,", "x3 boundary':'reciprocal', 'upper x3 boundary':'reciprocal', 'internal boundary':False } def initialise(self,", "cell to initialise the simulation. 
internal_bc Specify the internal boundary", "max':0.5, 'resolution x1':256, 'resolution x2':256, 'resolution x3':0, 'cfl':0.3, 'initial dt':1.0e-5,", "'max dt increase':1.5, 'initial t': 0.0, 'max time': 5.0, 'save", "'profiling': True, 'restart file':None, 'gamma':1.4, 'density unit':1.0, 'length unit':1.0, 'velocity", "V[prs, region_2] = pres V[vx1, region_2] = -vel_1 + vx1_per[vx1,", "0.0 amp = 0.001 vx1_per = (np.random.random(V.shape)*2.0 - 1)*amp vx2_per", "'restart file':None, 'gamma':1.4, 'density unit':1.0, 'length unit':1.0, 'velocity unit':1.0, 'optimisation':", "vx2_per[vx2, region_1] V[rho, region_2] = dens_2 V[prs, region_2] = pres", "= pres V[vx1, region_1] = vel_1 + vx1_per[vx1, region_1] V[vx2,", "+ vx1_per[vx1, region_2] V[vx2, region_2] = vel_2 + vx2_per[vx2, region_2]", "as np from mars.settings import * class Problem: \"\"\" Synopsis", "= { 'Name':'<NAME> instability.', 'Dimensions':'2D', 'x1 min':-0.5, 'x1 max':0.5, 'x2", "'x2 min':-0.5, 'x2 max':0.5, 'x3 min':-0.5, 'x3 max':0.5, 'resolution x1':256,", "boundary':'reciprocal', 'lower x2 boundary':'reciprocal', 'upper x2 boundary':'reciprocal', 'lower x3 boundary':'reciprocal',", "= 2.0 dens_2 = 1.0 pres = 2.0 vel_1 =", "simulation. TODO ---- None \"\"\" def __init__(self): self.parameter = {", "Methods ------- initialise Set all variables in each cell to" ]
[ "ch_val >= 3584 and ch_val <= 3711: return True return", "in ignore_chars: num_ignore += 1 elif isthaichar(ch): num_thai += 1", "be ignored (i.e. will be considered as Thai) :return: True", "# -*- coding: utf-8 -*- \"\"\" Check if it is", "is Thai text \"\"\" import string _DEFAULT_IGNORE_CHARS = string.whitespace +", "is Thai เป็นอักษรไทยหรือไม่ :param str ch: input character :return: True", "เป็นอักษรไทยหรือไม่ :param str ch: input character :return: True or False", "0 if not ignore_chars: ignore_chars = \"\" num_thai = 0", "if ch_val >= 3584 and ch_val <= 3711: return True", ":param str ignore_chars: characters to be ignored (i.e. will be", "= 0 for ch in text: if ch in ignore_chars:", "3711: return True return False def isthai(word: str, ignore_chars: str", "= \"\" num_thai = 0 num_ignore = 0 for ch", "text: input text :return: float, proportion of characters in the", "character \"\"\" if not text or not isinstance(text, str): return", "def isthaichar(ch: str) -> bool: \"\"\" Check if a character", "be considered as Thai) :return: True or False \"\"\" if", "text :param str ignore_chars: characters to be ignored (i.e. 
will", "return 0 if not ignore_chars: ignore_chars = \"\" num_thai =", "Thai เป็นคำที่มีแต่อักษรไทยหรือไม่ :param str word: input text :param str ignore_chars:", "str, ignore_chars: str = \".\") -> bool: \"\"\" Check if", "_DEFAULT_IGNORE_CHARS = string.whitespace + string.digits + string.punctuation def isthaichar(ch: str)", "\"\"\" Check if a character is Thai เป็นอักษรไทยหรือไม่ :param str", "\"\"\" ch_val = ord(ch) if ch_val >= 3584 and ch_val", "0 for ch in text: if ch in ignore_chars: num_ignore", "coding: utf-8 -*- \"\"\" Check if it is Thai text", "ch not in ignore_chars and not isthaichar(ch): return False return", "not isinstance(text, str): return 0 if not ignore_chars: ignore_chars =", "True def countthai(text: str, ignore_chars: str = _DEFAULT_IGNORE_CHARS) -> float:", "proportion of characters in the text that is Thai character", "ch in ignore_chars: num_ignore += 1 elif isthaichar(ch): num_thai +=", "will be considered as Thai) :return: True or False \"\"\"", "num_thai = 0 num_ignore = 0 for ch in text:", "(i.e. 
will be considered as Thai) :return: True or False", "-> bool: \"\"\" Check if all character is Thai เป็นคำที่มีแต่อักษรไทยหรือไม่", "ch in word: if ch not in ignore_chars and not", "return False return True def countthai(text: str, ignore_chars: str =", "for ch in text: if ch in ignore_chars: num_ignore +=", "isthaichar(ch): num_thai += 1 num_count = len(text) - num_ignore return", "if ch in ignore_chars: num_ignore += 1 elif isthaichar(ch): num_thai", "\"\" num_thai = 0 num_ignore = 0 for ch in", "True or False \"\"\" if not ignore_chars: ignore_chars = \"\"", "\"\"\" :param str text: input text :return: float, proportion of", "in word: if ch not in ignore_chars and not isthaichar(ch):", "False return True def countthai(text: str, ignore_chars: str = _DEFAULT_IGNORE_CHARS)", ":return: True or False \"\"\" if not ignore_chars: ignore_chars =", "characters in the text that is Thai character \"\"\" if", "\"\"\" Check if it is Thai text \"\"\" import string", "= string.whitespace + string.digits + string.punctuation def isthaichar(ch: str) ->", "float: \"\"\" :param str text: input text :return: float, proportion", "bool: \"\"\" Check if all character is Thai เป็นคำที่มีแต่อักษรไทยหรือไม่ :param", "\"\"\" if not ignore_chars: ignore_chars = \"\" for ch in", "return False def isthai(word: str, ignore_chars: str = \".\") ->", "is Thai เป็นคำที่มีแต่อักษรไทยหรือไม่ :param str word: input text :param str", "or False \"\"\" if not ignore_chars: ignore_chars = \"\" for", "character :return: True or False \"\"\" ch_val = ord(ch) if", "text :return: float, proportion of characters in the text that", "utf-8 -*- \"\"\" Check if it is Thai text \"\"\"", "เป็นคำที่มีแต่อักษรไทยหรือไม่ :param str word: input text :param str ignore_chars: characters", "def countthai(text: str, ignore_chars: str = _DEFAULT_IGNORE_CHARS) -> float: \"\"\"", "Check if all character is Thai เป็นคำที่มีแต่อักษรไทยหรือไม่ :param str word:", "or not isinstance(text, str): return 0 if not ignore_chars: 
ignore_chars", "ignore_chars: str = _DEFAULT_IGNORE_CHARS) -> float: \"\"\" :param str text:", "1 num_count = len(text) - num_ignore return (num_thai / num_count)", "input text :return: float, proportion of characters in the text", "ignore_chars and not isthaichar(ch): return False return True def countthai(text:", "+= 1 elif isthaichar(ch): num_thai += 1 num_count = len(text)", "string.whitespace + string.digits + string.punctuation def isthaichar(ch: str) -> bool:", "str text: input text :return: float, proportion of characters in", "string _DEFAULT_IGNORE_CHARS = string.whitespace + string.digits + string.punctuation def isthaichar(ch:", "if not ignore_chars: ignore_chars = \"\" num_thai = 0 num_ignore", "-*- coding: utf-8 -*- \"\"\" Check if it is Thai", "ch_val = ord(ch) if ch_val >= 3584 and ch_val <=", "is Thai character \"\"\" if not text or not isinstance(text,", "_DEFAULT_IGNORE_CHARS) -> float: \"\"\" :param str text: input text :return:", "num_count = len(text) - num_ignore return (num_thai / num_count) *", "-> float: \"\"\" :param str text: input text :return: float,", "return True def countthai(text: str, ignore_chars: str = _DEFAULT_IGNORE_CHARS) ->", "considered as Thai) :return: True or False \"\"\" if not", "as Thai) :return: True or False \"\"\" if not ignore_chars:", "str) -> bool: \"\"\" Check if a character is Thai", "ignore_chars: str = \".\") -> bool: \"\"\" Check if all", "if not ignore_chars: ignore_chars = \"\" for ch in word:", "str, ignore_chars: str = _DEFAULT_IGNORE_CHARS) -> float: \"\"\" :param str", "num_thai += 1 num_count = len(text) - num_ignore return (num_thai", "input character :return: True or False \"\"\" ch_val = ord(ch)", "= 0 num_ignore = 0 for ch in text: if", "if all character is Thai เป็นคำที่มีแต่อักษรไทยหรือไม่ :param str word: input", "num_ignore = 0 for ch in text: if ch in", "3584 and ch_val <= 3711: return True return False def", "isthaichar(ch: str) -> bool: \"\"\" Check if a character is", "ch_val <= 3711: 
return True return False def isthai(word: str,", "countthai(text: str, ignore_chars: str = _DEFAULT_IGNORE_CHARS) -> float: \"\"\" :param", ">= 3584 and ch_val <= 3711: return True return False", "that is Thai character \"\"\" if not text or not", "<= 3711: return True return False def isthai(word: str, ignore_chars:", "a character is Thai เป็นอักษรไทยหรือไม่ :param str ch: input character", "ord(ch) if ch_val >= 3584 and ch_val <= 3711: return", "+ string.digits + string.punctuation def isthaichar(ch: str) -> bool: \"\"\"", "of characters in the text that is Thai character \"\"\"", "characters to be ignored (i.e. will be considered as Thai)", "False \"\"\" if not ignore_chars: ignore_chars = \"\" for ch", ":param str ch: input character :return: True or False \"\"\"", "not isthaichar(ch): return False return True def countthai(text: str, ignore_chars:", "isinstance(text, str): return 0 if not ignore_chars: ignore_chars = \"\"", "character is Thai เป็นคำที่มีแต่อักษรไทยหรือไม่ :param str word: input text :param", "str = _DEFAULT_IGNORE_CHARS) -> float: \"\"\" :param str text: input", "Check if a character is Thai เป็นอักษรไทยหรือไม่ :param str ch:", "ignore_chars: ignore_chars = \"\" for ch in word: if ch", "not in ignore_chars and not isthaichar(ch): return False return True", "text: if ch in ignore_chars: num_ignore += 1 elif isthaichar(ch):", "text \"\"\" import string _DEFAULT_IGNORE_CHARS = string.whitespace + string.digits +", "= ord(ch) if ch_val >= 3584 and ch_val <= 3711:", "True or False \"\"\" ch_val = ord(ch) if ch_val >=", "string.punctuation def isthaichar(ch: str) -> bool: \"\"\" Check if a", "if not text or not isinstance(text, str): return 0 if", "\"\"\" import string _DEFAULT_IGNORE_CHARS = string.whitespace + string.digits + string.punctuation", "to be ignored (i.e. 
will be considered as Thai) :return:", "character is Thai เป็นอักษรไทยหรือไม่ :param str ch: input character :return:", "+= 1 num_count = len(text) - num_ignore return (num_thai /", "in the text that is Thai character \"\"\" if not", "return True return False def isthai(word: str, ignore_chars: str =", "ch in text: if ch in ignore_chars: num_ignore += 1", "if it is Thai text \"\"\" import string _DEFAULT_IGNORE_CHARS =", "the text that is Thai character \"\"\" if not text", "string.digits + string.punctuation def isthaichar(ch: str) -> bool: \"\"\" Check", "Thai text \"\"\" import string _DEFAULT_IGNORE_CHARS = string.whitespace + string.digits", "if ch not in ignore_chars and not isthaichar(ch): return False", "\".\") -> bool: \"\"\" Check if all character is Thai", "str ch: input character :return: True or False \"\"\" ch_val", "Thai เป็นอักษรไทยหรือไม่ :param str ch: input character :return: True or", "1 elif isthaichar(ch): num_thai += 1 num_count = len(text) -", "bool: \"\"\" Check if a character is Thai เป็นอักษรไทยหรือไม่ :param", "not text or not isinstance(text, str): return 0 if not", "False \"\"\" ch_val = ord(ch) if ch_val >= 3584 and", "= \"\" for ch in word: if ch not in", "ignored (i.e. 
will be considered as Thai) :return: True or", "isthai(word: str, ignore_chars: str = \".\") -> bool: \"\"\" Check", ":param str word: input text :param str ignore_chars: characters to", "Check if it is Thai text \"\"\" import string _DEFAULT_IGNORE_CHARS", "0 num_ignore = 0 for ch in text: if ch", "ignore_chars = \"\" num_thai = 0 num_ignore = 0 for", "Thai) :return: True or False \"\"\" if not ignore_chars: ignore_chars", "word: if ch not in ignore_chars and not isthaichar(ch): return", "or False \"\"\" ch_val = ord(ch) if ch_val >= 3584", "input text :param str ignore_chars: characters to be ignored (i.e.", ":param str text: input text :return: float, proportion of characters", ":return: True or False \"\"\" ch_val = ord(ch) if ch_val", "\"\"\" if not text or not isinstance(text, str): return 0", ":return: float, proportion of characters in the text that is", "False def isthai(word: str, ignore_chars: str = \".\") -> bool:", "ignore_chars: characters to be ignored (i.e. will be considered as", "= len(text) - num_ignore return (num_thai / num_count) * 100", "in text: if ch in ignore_chars: num_ignore += 1 elif", "num_ignore += 1 elif isthaichar(ch): num_thai += 1 num_count =", "if a character is Thai เป็นอักษรไทยหรือไม่ :param str ch: input", "ignore_chars: ignore_chars = \"\" num_thai = 0 num_ignore = 0", "\"\" for ch in word: if ch not in ignore_chars", "isthaichar(ch): return False return True def countthai(text: str, ignore_chars: str", "= _DEFAULT_IGNORE_CHARS) -> float: \"\"\" :param str text: input text", "text or not isinstance(text, str): return 0 if not ignore_chars:", "Thai character \"\"\" if not text or not isinstance(text, str):", "+ string.punctuation def isthaichar(ch: str) -> bool: \"\"\" Check if", "and ch_val <= 3711: return True return False def isthai(word:", "float, proportion of characters in the text that is Thai", "in ignore_chars and not isthaichar(ch): return False return True def", "elif isthaichar(ch): num_thai += 1 num_count = 
len(text) - num_ignore", "ch: input character :return: True or False \"\"\" ch_val =", "str word: input text :param str ignore_chars: characters to be", "-> bool: \"\"\" Check if a character is Thai เป็นอักษรไทยหรือไม่", "and not isthaichar(ch): return False return True def countthai(text: str,", "it is Thai text \"\"\" import string _DEFAULT_IGNORE_CHARS = string.whitespace", "str = \".\") -> bool: \"\"\" Check if all character", "= \".\") -> bool: \"\"\" Check if all character is", "str): return 0 if not ignore_chars: ignore_chars = \"\" num_thai", "ignore_chars = \"\" for ch in word: if ch not", "str ignore_chars: characters to be ignored (i.e. will be considered", "import string _DEFAULT_IGNORE_CHARS = string.whitespace + string.digits + string.punctuation def", "all character is Thai เป็นคำที่มีแต่อักษรไทยหรือไม่ :param str word: input text", "def isthai(word: str, ignore_chars: str = \".\") -> bool: \"\"\"", "True return False def isthai(word: str, ignore_chars: str = \".\")", "text that is Thai character \"\"\" if not text or", "not ignore_chars: ignore_chars = \"\" num_thai = 0 num_ignore =", "ignore_chars: num_ignore += 1 elif isthaichar(ch): num_thai += 1 num_count", "-*- \"\"\" Check if it is Thai text \"\"\" import", "word: input text :param str ignore_chars: characters to be ignored", "\"\"\" Check if all character is Thai เป็นคำที่มีแต่อักษรไทยหรือไม่ :param str", "not ignore_chars: ignore_chars = \"\" for ch in word: if", "for ch in word: if ch not in ignore_chars and" ]
[]
[ "y) print(\"X filtered>> \", x_filtered) print(\"Y filtered>> \", y_filtered) show(x_filtered,", "y, 2) print(\"X unfiltered>> \", x) print(\"Y unfiltered>> \", y)", "map(x_filtered, y_filtered, x, y, title=\"title\"): # Generate some test data", "# base = np.random.randint(0, 10, 5) outliers = np.random.randint(10, 20,", "marker=\"o\") plt.subplots_adjust(bottom=0.25) plt.xlabel('x') plt.ylabel('y') plt.title(title) plt.legend([\"Filter\", \"Raw\"]) plt.show() # Generating", "y, 'green', marker=\"o\") plt.subplots_adjust(bottom=0.25) plt.xlabel('x') plt.ylabel('y') plt.title(title) plt.legend([\"Filter\", \"Raw\"]) plt.show()", "yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() heatmap, xedges, yedges", "y, ploy_nom): return savgol_filter(x, len(x) - 1, 10), savgol_filter(y, len(y)", "y, title=\"title\"): # Generate some test data heatmap, xedges, yedges", "x, y = fill_data() print(len(y)) # Savitzky-Golay filter x_filtered, y_filtered", "unfiltered>> \", x) print(\"Y unfiltered>> \", y) print(\"X filtered>> \",", "10), savgol_filter(y, len(y) - 1, 10) def map(x_filtered, y_filtered, x,", "base = np.linspace(0, 5, 11) # base = np.random.randint(0, 10,", "plt.legend([\"Filter\", \"Raw\"]) plt.show() # Generating the noisy signal x, y", "np.concatenate( (np.array([0]), MadDog.find_outliers(generate()))) # np.sin(x) + np.cos(x) + np.random.random(100) #", "return data def fill_data(): # Build random data return np.concatenate((np.array([0]),", "yedges = np.histogram2d(x, y, bins=50) extent = [xedges[0], xedges[-1], yedges[0],", "1, 10) def map(x_filtered, y_filtered, x, y, title=\"title\"): # Generate", "= fig.subplots() plt.plot(x_filtered, y_filtered, 'red', marker=\"o\") plt.plot(x, y, 'green', marker=\"o\")", "# Generate some test data heatmap, xedges, yedges = np.histogram2d(x,", "np.histogram2d(x_filtered, y_filtered, bins=50) extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf()", "print(\"X 
filtered>> \", x_filtered) print(\"Y filtered>> \", y_filtered) show(x_filtered, y_filtered,", "y, title=\"Lorem ipsum\"): # Plotting fig = plt.figure() ax =", "(np.array([0]), MadDog.find_outliers(generate()))) # np.sin(x) + np.cos(x) + np.random.random(100) # np.linspace(0,", "'green', marker=\"o\") plt.subplots_adjust(bottom=0.25) plt.xlabel('x') plt.ylabel('y') plt.title(title) plt.legend([\"Filter\", \"Raw\"]) plt.show() #", "yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() def show(x_filtered, y_filtered,", "outliers)) np.random.shuffle(data) return data def fill_data(): # Build random data", "np.random.shuffle(data) return data def fill_data(): # Build random data return", "plt.xlabel('x') plt.ylabel('y') plt.title(title) plt.legend([\"Filter\", \"Raw\"]) plt.show() # Generating the noisy", "np.random.random(100) # np.linspace(0, 2*np.pi, 100) def savitzky(x, y, ploy_nom): return", "ploy_nom): return savgol_filter(x, len(x) - 1, 10), savgol_filter(y, len(y) -", "generate(): # Generate random data base = np.linspace(0, 5, 11)", "import savgol_filter import matplotlib.pyplot as plt import MadDog x =", "y, bins=50) extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T,", "xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() heatmap, xedges,", "x) print(\"Y unfiltered>> \", y) print(\"X filtered>> \", x_filtered) print(\"Y", "\", x_filtered) print(\"Y filtered>> \", y_filtered) show(x_filtered, y_filtered, x, y)", "extent=extent, origin='lower') plt.show() heatmap, xedges, yedges = np.histogram2d(x_filtered, y_filtered, bins=50)", "import numpy as np from scipy.signal import savgol_filter import matplotlib.pyplot", "np from scipy.signal import savgol_filter import matplotlib.pyplot as plt import", "bins=50) extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent,", "plt.show() 
heatmap, xedges, yedges = np.histogram2d(x_filtered, y_filtered, bins=50) extent =", "= np.linspace(0, 5, 11) # base = np.random.randint(0, 10, 5)", "Build random data return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate( (np.array([0]), MadDog.find_outliers(generate()))) #", "heatmap, xedges, yedges = np.histogram2d(x, y, bins=50) extent = [xedges[0],", "as np from scipy.signal import savgol_filter import matplotlib.pyplot as plt", "'red', marker=\"o\") plt.plot(x, y, 'green', marker=\"o\") plt.subplots_adjust(bottom=0.25) plt.xlabel('x') plt.ylabel('y') plt.title(title)", "import MadDog x = [] y = [] def generate():", "+ np.random.random(100) # np.linspace(0, 2*np.pi, 100) def savitzky(x, y, ploy_nom):", "test data heatmap, xedges, yedges = np.histogram2d(x, y, bins=50) extent", "def generate(): # Generate random data base = np.linspace(0, 5,", "= [] y = [] def generate(): # Generate random", "origin='lower') plt.show() heatmap, xedges, yedges = np.histogram2d(x_filtered, y_filtered, bins=50) extent", "y_filtered, x, y, title=\"Lorem ipsum\"): # Plotting fig = plt.figure()", "Generate some test data heatmap, xedges, yedges = np.histogram2d(x, y,", "= np.random.randint(10, 20, 2) data = np.concatenate((base, outliers)) np.random.shuffle(data) return", "5, 11) # base = np.random.randint(0, 10, 5) outliers =", "unfiltered>> \", y) print(\"X filtered>> \", x_filtered) print(\"Y filtered>> \",", "2) print(\"X unfiltered>> \", x) print(\"Y unfiltered>> \", y) print(\"X", "savgol_filter(y, len(y) - 1, 10) def map(x_filtered, y_filtered, x, y,", "= [] def generate(): # Generate random data base =", "return savgol_filter(x, len(x) - 1, 10), savgol_filter(y, len(y) - 1,", "ax = fig.subplots() plt.plot(x_filtered, y_filtered, 'red', marker=\"o\") plt.plot(x, y, 'green',", "np.linspace(0, 2*np.pi, 100) def savitzky(x, y, ploy_nom): return savgol_filter(x, len(x)", "filtered>> \", x_filtered) print(\"Y filtered>> \", y_filtered) 
show(x_filtered, y_filtered, x,", "def fill_data(): # Build random data return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate(", "# Build random data return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate( (np.array([0]), MadDog.find_outliers(generate())))", "some test data heatmap, xedges, yedges = np.histogram2d(x, y, bins=50)", "savitzky(x, y, 2) print(\"X unfiltered>> \", x) print(\"Y unfiltered>> \",", "10) def map(x_filtered, y_filtered, x, y, title=\"title\"): # Generate some", "np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate( (np.array([0]), MadDog.find_outliers(generate()))) # np.sin(x) + np.cos(x) +", "1, 10), savgol_filter(y, len(y) - 1, 10) def map(x_filtered, y_filtered,", "y_filtered = savitzky(x, y, 2) print(\"X unfiltered>> \", x) print(\"Y", "plt import MadDog x = [] y = [] def", "signal x, y = fill_data() print(len(y)) # Savitzky-Golay filter x_filtered,", "y = [] def generate(): # Generate random data base", "import matplotlib.pyplot as plt import MadDog x = [] y", "xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() def show(x_filtered,", "extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower')", "plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() heatmap, xedges, yedges = np.histogram2d(x_filtered, y_filtered,", "100) def savitzky(x, y, ploy_nom): return savgol_filter(x, len(x) - 1,", "def show(x_filtered, y_filtered, x, y, title=\"Lorem ipsum\"): # Plotting fig", "savitzky(x, y, ploy_nom): return savgol_filter(x, len(x) - 1, 10), savgol_filter(y,", "Plotting fig = plt.figure() ax = fig.subplots() plt.plot(x_filtered, y_filtered, 'red',", "2) data = np.concatenate((base, outliers)) np.random.shuffle(data) return data def fill_data():", "xedges, yedges = np.histogram2d(x, y, bins=50) extent = [xedges[0], 
xedges[-1],", "MadDog.find_outliers(generate()))) # np.sin(x) + np.cos(x) + np.random.random(100) # np.linspace(0, 2*np.pi,", "= np.histogram2d(x_filtered, y_filtered, bins=50) extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]]", "# Plotting fig = plt.figure() ax = fig.subplots() plt.plot(x_filtered, y_filtered,", "x, y, title=\"Lorem ipsum\"): # Plotting fig = plt.figure() ax", "data = np.concatenate((base, outliers)) np.random.shuffle(data) return data def fill_data(): #", "\"Raw\"]) plt.show() # Generating the noisy signal x, y =", "y_filtered, 'red', marker=\"o\") plt.plot(x, y, 'green', marker=\"o\") plt.subplots_adjust(bottom=0.25) plt.xlabel('x') plt.ylabel('y')", "# np.linspace(0, 2*np.pi, 100) def savitzky(x, y, ploy_nom): return savgol_filter(x,", "len(y) - 1, 10) def map(x_filtered, y_filtered, x, y, title=\"title\"):", "fill_data() print(len(y)) # Savitzky-Golay filter x_filtered, y_filtered = savitzky(x, y,", "plt.plot(x_filtered, y_filtered, 'red', marker=\"o\") plt.plot(x, y, 'green', marker=\"o\") plt.subplots_adjust(bottom=0.25) plt.xlabel('x')", "Generating the noisy signal x, y = fill_data() print(len(y)) #", "the noisy signal x, y = fill_data() print(len(y)) # Savitzky-Golay", "# Savitzky-Golay filter x_filtered, y_filtered = savitzky(x, y, 2) print(\"X", "[xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() heatmap,", "data heatmap, xedges, yedges = np.histogram2d(x, y, bins=50) extent =", "random data base = np.linspace(0, 5, 11) # base =", "y = fill_data() print(len(y)) # Savitzky-Golay filter x_filtered, y_filtered =", "[] y = [] def generate(): # Generate random data", "np.linspace(0, 5, 11) # base = np.random.randint(0, 10, 5) outliers", "[xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() def", "print(\"X unfiltered>> \", x) print(\"Y unfiltered>> \", y) print(\"X filtered>>", "MadDog x = [] y = [] 
def generate(): #", "filter x_filtered, y_filtered = savitzky(x, y, 2) print(\"X unfiltered>> \",", "plt.show() # Generating the noisy signal x, y = fill_data()", "np.concatenate((base, outliers)) np.random.shuffle(data) return data def fill_data(): # Build random", "= fill_data() print(len(y)) # Savitzky-Golay filter x_filtered, y_filtered = savitzky(x,", "20, 2) data = np.concatenate((base, outliers)) np.random.shuffle(data) return data def", "ipsum\"): # Plotting fig = plt.figure() ax = fig.subplots() plt.plot(x_filtered,", "y_filtered, x, y, title=\"title\"): # Generate some test data heatmap,", "x = [] y = [] def generate(): # Generate", "savgol_filter(x, len(x) - 1, 10), savgol_filter(y, len(y) - 1, 10)", "title=\"Lorem ipsum\"): # Plotting fig = plt.figure() ax = fig.subplots()", "yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() heatmap, xedges, yedges =", "5) outliers = np.random.randint(10, 20, 2) data = np.concatenate((base, outliers))", "return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate( (np.array([0]), MadDog.find_outliers(generate()))) # np.sin(x) + np.cos(x)", "marker=\"o\") plt.plot(x, y, 'green', marker=\"o\") plt.subplots_adjust(bottom=0.25) plt.xlabel('x') plt.ylabel('y') plt.title(title) plt.legend([\"Filter\",", "len(x) - 1, 10), savgol_filter(y, len(y) - 1, 10) def", "matplotlib.pyplot as plt import MadDog x = [] y =", "data return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate( (np.array([0]), MadDog.find_outliers(generate()))) # np.sin(x) +", "title=\"title\"): # Generate some test data heatmap, xedges, yedges =", "origin='lower') plt.show() def show(x_filtered, y_filtered, x, y, title=\"Lorem ipsum\"): #", "plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() def show(x_filtered, y_filtered, x, y,", "random data return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate( 
(np.array([0]), MadDog.find_outliers(generate()))) # np.sin(x)", "yedges = np.histogram2d(x_filtered, y_filtered, bins=50) extent = [xedges[0], xedges[-1], yedges[0],", "[] def generate(): # Generate random data base = np.linspace(0,", "def savitzky(x, y, ploy_nom): return savgol_filter(x, len(x) - 1, 10),", "data base = np.linspace(0, 5, 11) # base = np.random.randint(0,", "# Generate random data base = np.linspace(0, 5, 11) #", "np.random.randint(0, 10, 5) outliers = np.random.randint(10, 20, 2) data =", "show(x_filtered, y_filtered, x, y, title=\"Lorem ipsum\"): # Plotting fig =", "plt.ylabel('y') plt.title(title) plt.legend([\"Filter\", \"Raw\"]) plt.show() # Generating the noisy signal", "plt.title(title) plt.legend([\"Filter\", \"Raw\"]) plt.show() # Generating the noisy signal x,", "np.random.randint(10, 20, 2) data = np.concatenate((base, outliers)) np.random.shuffle(data) return data", "outliers = np.random.randint(10, 20, 2) data = np.concatenate((base, outliers)) np.random.shuffle(data)", "np.cos(x) + np.random.random(100) # np.linspace(0, 2*np.pi, 100) def savitzky(x, y,", "def map(x_filtered, y_filtered, x, y, title=\"title\"): # Generate some test", "data def fill_data(): # Build random data return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))),", "\", x) print(\"Y unfiltered>> \", y) print(\"X filtered>> \", x_filtered)", "# Generating the noisy signal x, y = fill_data() print(len(y))", "fig = plt.figure() ax = fig.subplots() plt.plot(x_filtered, y_filtered, 'red', marker=\"o\")", "numpy as np from scipy.signal import savgol_filter import matplotlib.pyplot as", "plt.plot(x, y, 'green', marker=\"o\") plt.subplots_adjust(bottom=0.25) plt.xlabel('x') plt.ylabel('y') plt.title(title) plt.legend([\"Filter\", \"Raw\"])", "print(len(y)) # Savitzky-Golay filter x_filtered, y_filtered = savitzky(x, y, 2)", "xedges, yedges = np.histogram2d(x_filtered, y_filtered, bins=50) extent = [xedges[0], xedges[-1],", "from scipy.signal import 
savgol_filter import matplotlib.pyplot as plt import MadDog", "Generate random data base = np.linspace(0, 5, 11) # base", "heatmap, xedges, yedges = np.histogram2d(x_filtered, y_filtered, bins=50) extent = [xedges[0],", "\", y) print(\"X filtered>> \", x_filtered) print(\"Y filtered>> \", y_filtered)", "= np.concatenate((base, outliers)) np.random.shuffle(data) return data def fill_data(): # Build", "- 1, 10), savgol_filter(y, len(y) - 1, 10) def map(x_filtered,", "= np.random.randint(0, 10, 5) outliers = np.random.randint(10, 20, 2) data", "# np.sin(x) + np.cos(x) + np.random.random(100) # np.linspace(0, 2*np.pi, 100)", "savgol_filter import matplotlib.pyplot as plt import MadDog x = []", "fig.subplots() plt.plot(x_filtered, y_filtered, 'red', marker=\"o\") plt.plot(x, y, 'green', marker=\"o\") plt.subplots_adjust(bottom=0.25)", "x, y, title=\"title\"): # Generate some test data heatmap, xedges,", "Savitzky-Golay filter x_filtered, y_filtered = savitzky(x, y, 2) print(\"X unfiltered>>", "x_filtered, y_filtered = savitzky(x, y, 2) print(\"X unfiltered>> \", x)", "yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() def show(x_filtered, y_filtered, x,", "= np.histogram2d(x, y, bins=50) extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]]", "extent=extent, origin='lower') plt.show() def show(x_filtered, y_filtered, x, y, title=\"Lorem ipsum\"):", "base = np.random.randint(0, 10, 5) outliers = np.random.randint(10, 20, 2)", "2*np.pi, 100) def savitzky(x, y, ploy_nom): return savgol_filter(x, len(x) -", "plt.show() def show(x_filtered, y_filtered, x, y, title=\"Lorem ipsum\"): # Plotting", "= savitzky(x, y, 2) print(\"X unfiltered>> \", x) print(\"Y unfiltered>>", "MadDog.find_outliers(generate()))), np.concatenate( (np.array([0]), MadDog.find_outliers(generate()))) # np.sin(x) + np.cos(x) + np.random.random(100)", "noisy signal x, y = fill_data() print(len(y)) # Savitzky-Golay filter", "plt.subplots_adjust(bottom=0.25) 
plt.xlabel('x') plt.ylabel('y') plt.title(title) plt.legend([\"Filter\", \"Raw\"]) plt.show() # Generating the", "10, 5) outliers = np.random.randint(10, 20, 2) data = np.concatenate((base,", "plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() def show(x_filtered, y_filtered, x, y, title=\"Lorem", "scipy.signal import savgol_filter import matplotlib.pyplot as plt import MadDog x", "as plt import MadDog x = [] y = []", "plt.figure() ax = fig.subplots() plt.plot(x_filtered, y_filtered, 'red', marker=\"o\") plt.plot(x, y,", "= plt.figure() ax = fig.subplots() plt.plot(x_filtered, y_filtered, 'red', marker=\"o\") plt.plot(x,", "- 1, 10) def map(x_filtered, y_filtered, x, y, title=\"title\"): #", "11) # base = np.random.randint(0, 10, 5) outliers = np.random.randint(10,", "+ np.cos(x) + np.random.random(100) # np.linspace(0, 2*np.pi, 100) def savitzky(x,", "np.histogram2d(x, y, bins=50) extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf()", "plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show() heatmap, xedges, yedges = np.histogram2d(x_filtered,", "y_filtered, bins=50) extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T,", "np.sin(x) + np.cos(x) + np.random.random(100) # np.linspace(0, 2*np.pi, 100) def", "fill_data(): # Build random data return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate( (np.array([0]),", "= [xedges[0], xedges[-1], yedges[0], yedges[-1]] plt.clf() plt.imshow(heatmap.T, extent=extent, origin='lower') plt.show()", "print(\"Y unfiltered>> \", y) print(\"X filtered>> \", x_filtered) print(\"Y filtered>>" ]
[ "to map the fields with customizer IDs. feed_mapping = {", "] } feed_mapping_operation = { 'operator': 'ADD', 'operand': feed_mapping }", "] } feed_service_operation = { 'operator': 'ADD', 'operand': customizer_feed }", "2.0 (the \"License\"); # you may not use this file", "= client.GetService( 'CustomerFeedService', version='v201406') feed_item_service = client.GetService('FeedItemService', version='v201406') feed_mapping_service =", "PLACEHOLDER_FIELD_DATE } ] } feed_mapping_operation = { 'operator': 'ADD', 'operand':", "feed' ' with ID %s.') % (feed_mapping['feedMappingId'], feed_mapping['placeholderType'], feed_mapping['feedId']) else:", "else: raise Exception('No feeds were added') # Creating feed mapping", "'$1450.00', 'date': '20140615 120000', 'adGroupId': adgroups[1] } ] feed_items =", "in response['value']: print 'Feed item with ID %s was added.'", "added.') # Finally, creating a customer (account-level) feed with a", "= client.GetService('FeedItemService', version='v201406') feed_mapping_service = client.GetService( 'FeedMappingService', version='v201406') feed_service =", "we use the \"IDENTITY\" # matching function that is always", "PLACEHOLDER_FIELD_DATE = '4' PLACEHOLDER_FIELD_STRING = '5' ADGROUPS = [ 'INSERT_ADGROUP_ID_HERE',", "that is always 'true' just to associate this feed with", "raise Exception('No ads were added.') if __name__ == '__main__': #", "}, 'attributeValues': [ { 'feedAttributeId': feed_data['nameId'], 'stringValue': item['name'] }, {", "\\'%s\\' and ID %s was added with:' '\\tName attribute ID", "place. items_data = [ { 'name': 'Mars', 'price': '$1234.56', 'date':", "customizer feed. 
One feed per account can be used for", "'xsi_type': 'TextAd', 'headline': 'Luxury Cruise to {=%s.Name}' % FEEDNAME, 'description1':", "'<EMAIL> (<NAME>)') # Import appropriate classes from the client library.", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "'attributes': [ {'type': 'STRING', 'name': 'Name'}, {'type': 'STRING', 'name': 'Price'},", "'__main__': # Initialize client object. adwords_client = adwords.AdWordsClient.LoadFromStorage() main(adwords_client, ADGROUPS)", "('Feed with name \\'%s\\' and ID %s was added with:'", "using the # :campaign_targeting, :ad_group_targeting, or :keyword_targeting attributes. matching_function =", "def main(client, adgroups): # Initialize appropriate services. ad_group_ad_service = client.GetService('AdGroupAdService',", "in response: feed_mapping = response['value'][0] print ('Feed mapping with ID", "= client.GetService( 'FeedMappingService', version='v201406') feed_service = client.GetService('FeedService', version='v201406') # First,", "if response and 'value' in response: feed_mapping = response['value'][0] print", "that determines when to use this feed. For this case", "'operator': 'ADD', 'operand': customer_feed } response = customer_feed_service.mutate([customer_feed_operation]) if response", "See the Placeholder reference page for a list of all", "match different feed items. operations = [{ 'operator': 'ADD', 'operand':", "text_ad } } for adgroup in adgroups] print operations response", "'===ad group ad service===' print response if response and 'value'", "'ADD', 'operand': customizer_feed } response = feed_service.mutate([feed_service_operation]) if response and", "added.') # All set! We can now create ads with", "the # :campaign_targeting, :ad_group_targeting, or :keyword_targeting attributes. 
matching_function = {", "(feed['name'], feed['id'], feed_data['nameId'], feed_data['priceId'], feed_data['dateId']) else: raise Exception('No feeds were", "response and 'value' in response: for feed_item in response['value']: print", "response['value'][0] print 'Customer feed with ID %s was added.' %", "use this file except in compliance with the License. #", "{ 'name': 'Mars', 'price': '$1234.56', 'date': '20140601 000000', 'adGroupId': adgroups[0]", "= feed_item_service.mutate(feed_item_operations) if response and 'value' in response: for feed_item", "{=%s.Price}' % FEEDNAME, 'description2': 'Offer ends in {=countdown(%s.Date)}!' % FEEDNAME,", "and 'value' in response: for ad in response['value']: print ('\\tCreated", "}, { 'feedAttributeId': feed_data['dateId'], 'stringValue': item['date'] } ]} for item", "else: raise Exception('No feed items were added.') # Finally, creating", "# Finally, creating a customer (account-level) feed with a matching", "'type': 'BOOLEAN', 'booleanValue': 'true' } ] } customer_feed = {", "= response['value'][0] feed_data = { 'feedId': feed['id'], 'nameId': feed['attributes'][0]['id'], 'priceId':", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "associate this feed with # the customer. The targeting is", "'ID %s') % (feed['name'], feed['id'], feed_data['nameId'], feed_data['priceId'], feed_data['dateId']) else: raise", "response = feed_service.mutate([feed_service_operation]) if response and 'value' in response: feed", "License. # You may obtain a copy of the License", "with customer and adds an ad that uses the feed", "ends in {=countdown(%s.Date)}!' % FEEDNAME, 'url': 'http://www.example.com', 'displayUrl': 'www.example.com' }", "added.' 
% feed_item['feedItemId'] else: raise Exception('No feed items were added.')", "FeedService.mutate Tags: AdGroupAdService.mutate \"\"\" __author__ = ('<EMAIL> (<NAME>)', '<EMAIL> (<NAME>)')", "always 'true' just to associate this feed with # the", "under the License is distributed on an \"AS IS\" BASIS,", "reference page for a list of all the placeholder types", "response if response and 'value' in response: for ad in", "License for the specific language governing permissions and # limitations", "the values we'd like to place. items_data = [ {", "'stringValue': item['date'] } ]} for item in items_data] feed_item_operations =", "'nameId': feed['attributes'][0]['id'], 'priceId': feed['attributes'][1]['id'], 'dateId': feed['attributes'][2]['id'] } print ('Feed with", "added.' % feed['feedId'] else: raise Exception('No customer feeds were added.')", "ad customizer feed. Associates the feed with customer and adds", "or :keyword_targeting attributes. matching_function = { 'operator': 'IDENTITY', 'lhsOperand': [", "Reserved. # # Licensed under the Apache License, Version 2.0", "feed_mapping_operation = { 'operator': 'ADD', 'operand': feed_mapping } response =", "'2' PLACEHOLDER_FIELD_PRICE = '3' PLACEHOLDER_FIELD_DATE = '4' PLACEHOLDER_FIELD_STRING = '5'", "[ {'type': 'STRING', 'name': 'Name'}, {'type': 'STRING', 'name': 'Price'}, {'type':", "{'type': 'STRING', 'name': 'Price'}, {'type': 'DATE_TIME', 'name': 'Date'} ] }", "'feedAttributeId': feed_data['nameId'], 'stringValue': item['name'] }, { 'feedAttributeId': feed_data['priceId'], 'stringValue': item['price']", "{ 'name': FEEDNAME, 'attributes': [ {'type': 'STRING', 'name': 'Name'}, {'type':", "ID \\'%s\\', type \\'%s\\', and status \\'%s\\'.' 
% (ad['ad']['id'], ad['ad']['Ad.Type'],", "[{ 'operator': 'ADD', 'operand': feed_item } for feed_item in feed_items]", "feed = response['value'][0] feed_data = { 'feedId': feed['id'], 'nameId': feed['attributes'][0]['id'],", "for feed_item in feed_items] response = feed_item_service.mutate(feed_item_operations) if response and", "feed_mapping_service.mutate([feed_mapping_operation]) if response and 'value' in response: feed_mapping = response['value'][0]", "feeds were added') # Creating feed mapping to map the", "customer feeds were added.') # All set! We can now", "# First, create a customizer feed. One feed per account", "]} for item in items_data] feed_item_operations = [{ 'operator': 'ADD',", "'DATE_TIME', 'name': 'Date'} ] } feed_service_operation = { 'operator': 'ADD',", "and 'value' in response: feed = response['value'][0] feed_data = {", "{=%s.Name}' % FEEDNAME, 'description1': 'Only {=%s.Price}' % FEEDNAME, 'description2': 'Offer", "in compliance with the License. # You may obtain a", "version='v201406') customer_feed_service = client.GetService( 'CustomerFeedService', version='v201406') feed_item_service = client.GetService('FeedItemService', version='v201406')", "software # distributed under the License is distributed on an", "Associates the feed with customer and adds an ad that", "= customer_feed_service.mutate([customer_feed_operation]) if response and 'value' in response: feed =", "= client.GetService('AdGroupAdService', version='v201406') customer_feed_service = client.GetService( 'CustomerFeedService', version='v201406') feed_item_service =", "ad service===' print response if response and 'value' in response:", "response['value']: print 'Feed item with ID %s was added.' %", ":keyword_targeting attributes. matching_function = { 'operator': 'IDENTITY', 'lhsOperand': [ {", "= { 'operator': 'ADD', 'operand': customizer_feed } response = feed_service.mutate([feed_service_operation])", "# See the Placeholder reference page for a list of", "# ads. 
customizer_feed = { 'name': FEEDNAME, 'attributes': [ {'type':", "feed to populate dynamic data. Tags: CustomerFeedService.mutate, FeedItemService.mutate Tags: FeedMappingService.mutate,", "'displayUrl': 'www.example.com' } # We add the same ad to", "= feed_mapping_service.mutate([feed_mapping_operation]) if response and 'value' in response: feed_mapping =", "} for feed_item in feed_items] response = feed_item_service.mutate(feed_item_operations) if response", "feed items using the # :campaign_targeting, :ad_group_targeting, or :keyword_targeting attributes.", "if response and 'value' in response: for ad in response['value']:", "and date attribute' 'ID %s') % (feed['name'], feed['id'], feed_data['nameId'], feed_data['priceId'],", "feed_data['dateId'], 'fieldId': PLACEHOLDER_FIELD_DATE } ] } feed_mapping_operation = { 'operator':", "client library. from googleads import adwords # See the Placeholder", "this feed. For this case we use the \"IDENTITY\" #", "a customizer feed. One feed per account can be used", "'ADD', 'operand': { 'adGroupId': adgroup, 'ad': text_ad } } for", "the client library. from googleads import adwords # See the", "{ 'name': 'Venus', 'price': '$1450.00', 'date': '20140615 120000', 'adGroupId': adgroups[1]", "item in items_data] feed_item_operations = [{ 'operator': 'ADD', 'operand': feed_item", "Initialize appropriate services. ad_group_ad_service = client.GetService('AdGroupAdService', version='v201406') customer_feed_service = client.GetService(", "'Date'} ] } feed_service_operation = { 'operator': 'ADD', 'operand': customizer_feed", "We can now create ads with customizations. text_ad = {", "adgroup, 'ad': text_ad } } for adgroup in adgroups] print", "to place. items_data = [ { 'name': 'Mars', 'price': '$1234.56',", "-- the values we'd like to place. 
items_data = [", "'CustomerFeedService', version='v201406') feed_item_service = client.GetService('FeedItemService', version='v201406') feed_mapping_service = client.GetService( 'FeedMappingService',", "CustomerFeedService.mutate, FeedItemService.mutate Tags: FeedMappingService.mutate, FeedService.mutate Tags: AdGroupAdService.mutate \"\"\" __author__ =", "language governing permissions and # limitations under the License. \"\"\"Adds", "feed. For this case we use the \"IDENTITY\" # matching", "{ 'xsi_type': 'ConstantOperand', 'type': 'BOOLEAN', 'booleanValue': 'true' } ] }", "response and 'value' in response: for ad in response['value']: print", "'stringValue': item['name'] }, { 'feedAttributeId': feed_data['priceId'], 'stringValue': item['price'] }, {", "can now create ads with customizations. text_ad = { 'xsi_type':", "for ad in response['value']: print ('\\tCreated an ad with ID", "adds an ad that uses the feed to populate dynamic", "if response and 'value' in response: for feed_item in response['value']:", "the feed to populate dynamic data. Tags: CustomerFeedService.mutate, FeedItemService.mutate Tags:", "{ 'feedAttributeId': feed_data['nameId'], 'fieldId': PLACEHOLDER_FIELD_STRING }, { 'feedAttributeId': feed_data['priceId'], 'fieldId':", "\\'%s\\'.' % (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status'])) else: raise Exception('No ads were", "ID %s and placeholder type %s was saved for feed'", "feed_data['nameId'], 'stringValue': item['name'] }, { 'feedAttributeId': feed_data['priceId'], 'stringValue': item['price'] },", "= '10' PLACEHOLDER_FIELD_INTEGER = '1' PLACEHOLDER_FIELD_FLOAT = '2' PLACEHOLDER_FIELD_PRICE =", "placeholder types # and fields: # https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER = '10'", "they serve, they will show # different values, since they", "main(client, adgroups): # Initialize appropriate services. 
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201406')", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "print 'Feed item with ID %s was added.' % feed_item['feedItemId']", "PLACEHOLDER_AD_CUSTOMIZER, 'feedId': feed_data['feedId'], 'attributeFieldMappings': [ { 'feedAttributeId': feed_data['nameId'], 'fieldId': PLACEHOLDER_FIELD_STRING", "and 'value' in response: feed_mapping = response['value'][0] print ('Feed mapping", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "items_data = [ { 'name': 'Mars', 'price': '$1234.56', 'date': '20140601", "adgroups] print operations response = ad_group_ad_service.mutate(operations) print '===ad group ad", "feed_item_operations = [{ 'operator': 'ADD', 'operand': feed_item } for feed_item", "ad['status'])) else: raise Exception('No ads were added.') if __name__ ==", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "} customer_feed = { 'feedId': feed_data['feedId'], 'matchingFunction': matching_function, 'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER]", "to in writing, software # distributed under the License is", "feed_mapping['feedId']) else: raise Exception('No feed mappings were added.') # Now", "# See the License for the specific language governing permissions", "with ID %s and placeholder type %s was saved for", "ADGROUPS = [ 'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE' ] FEEDNAME = 'INSERT_FEED_NAME_HERE' def", "feed_item['feedItemId'] else: raise Exception('No feed items were added.') # Finally,", "print ('Feed mapping with ID %s and placeholder type %s", "feed['attributes'][0]['id'], 'priceId': feed['attributes'][1]['id'], 'dateId': feed['attributes'][2]['id'] } print ('Feed with name", "raise Exception('No feed mappings were added.') # Now adding feed", "raise Exception('No feeds were added') # Creating feed mapping to", "# the customer. 
The targeting is done within the feed", "if response and 'value' in response: feed = response['value'][0] print", "or agreed to in writing, software # distributed under the", "targeting is done within the feed items using the #", "The targeting is done within the feed items using the", "'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER] } customer_feed_operation = { 'operator': 'ADD', 'operand': customer_feed", "customizer_feed } response = feed_service.mutate([feed_service_operation]) if response and 'value' in", "required by applicable law or agreed to in writing, software", "Cruise to {=%s.Name}' % FEEDNAME, 'description1': 'Only {=%s.Price}' % FEEDNAME,", "PLACEHOLDER_FIELD_FLOAT = '2' PLACEHOLDER_FIELD_PRICE = '3' PLACEHOLDER_FIELD_DATE = '4' PLACEHOLDER_FIELD_STRING", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "All set! We can now create ads with customizations. text_ad", "with the License. # You may obtain a copy of", "ad that uses the feed to populate dynamic data. Tags:", "https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER = '10' PLACEHOLDER_FIELD_INTEGER = '1' PLACEHOLDER_FIELD_FLOAT = '2'", "attribute ID %s and price attribute ID %s and date", "'ConstantOperand', 'type': 'BOOLEAN', 'booleanValue': 'true' } ] } customer_feed =", "type \\'%s\\', and status \\'%s\\'.' % (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status'])) else:", "%s and date attribute' 'ID %s') % (feed['name'], feed['id'], feed_data['nameId'],", "[ { 'xsi_type': 'ConstantOperand', 'type': 'BOOLEAN', 'booleanValue': 'true' } ]", "# limitations under the License. \"\"\"Adds an ad customizer feed.", "= feed_service.mutate([feed_service_operation]) if response and 'value' in response: feed =", "with:' '\\tName attribute ID %s and price attribute ID %s", "added with:' '\\tName attribute ID %s and price attribute ID", "compliance with the License. # You may obtain a copy", "All Rights Reserved. 
# # Licensed under the Apache License,", "agreed to in writing, software # distributed under the License", "a list of all the placeholder types # and fields:", "FEEDNAME, 'description2': 'Offer ends in {=countdown(%s.Date)}!' % FEEDNAME, 'url': 'http://www.example.com',", "{=countdown(%s.Date)}!' % FEEDNAME, 'url': 'http://www.example.com', 'displayUrl': 'www.example.com' } # We", "[{'feedId': feed_data['feedId'], 'adGroupTargeting': { 'TargetingAdGroupId': item['adGroupId'] }, 'attributeValues': [ {", "%s was added.' % feed['feedId'] else: raise Exception('No customer feeds", "{ 'operator': 'IDENTITY', 'lhsOperand': [ { 'xsi_type': 'ConstantOperand', 'type': 'BOOLEAN',", "distributed under the License is distributed on an \"AS IS\"", "'Name'}, {'type': 'STRING', 'name': 'Price'}, {'type': 'DATE_TIME', 'name': 'Date'} ]", "ID %s and price attribute ID %s and date attribute'", "Inc. All Rights Reserved. # # Licensed under the Apache", "to associate this feed with # the customer. The targeting", "} response = feed_service.mutate([feed_service_operation]) if response and 'value' in response:", "in feed_items] response = feed_item_service.mutate(feed_item_operations) if response and 'value' in", "Exception('No feed mappings were added.') # Now adding feed items", "both ad groups. When they serve, they will show #", "'$1234.56', 'date': '20140601 000000', 'adGroupId': adgroups[0] }, { 'name': 'Venus',", "'STRING', 'name': 'Name'}, {'type': 'STRING', 'name': 'Price'}, {'type': 'DATE_TIME', 'name':", "use the \"IDENTITY\" # matching function that is always 'true'", "'5' ADGROUPS = [ 'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE' ] FEEDNAME = 'INSERT_FEED_NAME_HERE'", "express or implied. # See the License for the specific", "'3' PLACEHOLDER_FIELD_DATE = '4' PLACEHOLDER_FIELD_STRING = '5' ADGROUPS = [", "except in compliance with the License. 
# You may obtain", "customer_feed } response = customer_feed_service.mutate([customer_feed_operation]) if response and 'value' in", "'operator': 'ADD', 'operand': { 'adGroupId': adgroup, 'ad': text_ad } }", "feed_mapping['placeholderType'], feed_mapping['feedId']) else: raise Exception('No feed mappings were added.') #", "response['value']: print ('\\tCreated an ad with ID \\'%s\\', type \\'%s\\',", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "customer (account-level) feed with a matching function # that determines", "' with ID %s.') % (feed_mapping['feedMappingId'], feed_mapping['placeholderType'], feed_mapping['feedId']) else: raise", "%s and placeholder type %s was saved for feed' '", "not use this file except in compliance with the License.", "raise Exception('No feed items were added.') # Finally, creating a", "Tags: CustomerFeedService.mutate, FeedItemService.mutate Tags: FeedMappingService.mutate, FeedService.mutate Tags: AdGroupAdService.mutate \"\"\" __author__", "} ]} for item in items_data] feed_item_operations = [{ 'operator':", "writing, software # distributed under the License is distributed on", "added.') # Now adding feed items -- the values we'd", "'10' PLACEHOLDER_FIELD_INTEGER = '1' PLACEHOLDER_FIELD_FLOAT = '2' PLACEHOLDER_FIELD_PRICE = '3'", "Exception('No ads were added.') if __name__ == '__main__': # Initialize", "('<EMAIL> (<NAME>)', '<EMAIL> (<NAME>)') # Import appropriate classes from the", "the feed with customer and adds an ad that uses", "fields with customizer IDs. feed_mapping = { 'placeholderType': PLACEHOLDER_AD_CUSTOMIZER, 'feedId':", "the \"IDENTITY\" # matching function that is always 'true' just", "you may not use this file except in compliance with", "the placeholder types # and fields: # https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER =", "feed_item in response['value']: print 'Feed item with ID %s was", "# All set! 
We can now create ads with customizations.", "items. operations = [{ 'operator': 'ADD', 'operand': { 'adGroupId': adgroup,", "One feed per account can be used for all #", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "can be used for all # ads. customizer_feed = {", "For this case we use the \"IDENTITY\" # matching function", "We add the same ad to both ad groups. When", "ad groups. When they serve, they will show # different", "= ('<EMAIL> (<NAME>)', '<EMAIL> (<NAME>)') # Import appropriate classes from", "response: feed_mapping = response['value'][0] print ('Feed mapping with ID %s", "Tags: AdGroupAdService.mutate \"\"\" __author__ = ('<EMAIL> (<NAME>)', '<EMAIL> (<NAME>)') #", "'operand': customer_feed } response = customer_feed_service.mutate([customer_feed_operation]) if response and 'value'", "appropriate services. ad_group_ad_service = client.GetService('AdGroupAdService', version='v201406') customer_feed_service = client.GetService( 'CustomerFeedService',", "'url': 'http://www.example.com', 'displayUrl': 'www.example.com' } # We add the same", "googleads import adwords # See the Placeholder reference page for", "customer_feed_service = client.GetService( 'CustomerFeedService', version='v201406') feed_item_service = client.GetService('FeedItemService', version='v201406') feed_mapping_service", "items using the # :campaign_targeting, :ad_group_targeting, or :keyword_targeting attributes. matching_function", "feed_item in feed_items] response = feed_item_service.mutate(feed_item_operations) if response and 'value'", "CONDITIONS OF ANY KIND, either express or implied. # See", "use this feed. For this case we use the \"IDENTITY\"", "'adGroupId': adgroup, 'ad': text_ad } } for adgroup in adgroups]", "PLACEHOLDER_FIELD_PRICE = '3' PLACEHOLDER_FIELD_DATE = '4' PLACEHOLDER_FIELD_STRING = '5' ADGROUPS", "dynamic data. 
Tags: CustomerFeedService.mutate, FeedItemService.mutate Tags: FeedMappingService.mutate, FeedService.mutate Tags: AdGroupAdService.mutate", "'INSERT_FEED_NAME_HERE' def main(client, adgroups): # Initialize appropriate services. ad_group_ad_service =", "added') # Creating feed mapping to map the fields with", "account can be used for all # ads. customizer_feed =", ":ad_group_targeting, or :keyword_targeting attributes. matching_function = { 'operator': 'IDENTITY', 'lhsOperand':", "print operations response = ad_group_ad_service.mutate(operations) print '===ad group ad service==='", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "print '===ad group ad service===' print response if response and", "feed_mapping_service = client.GetService( 'FeedMappingService', version='v201406') feed_service = client.GetService('FeedService', version='v201406') #", "group ad service===' print response if response and 'value' in", "%s and price attribute ID %s and date attribute' 'ID", "'value' in response: feed = response['value'][0] print 'Customer feed with", "appropriate classes from the client library. from googleads import adwords", "'feedAttributeId': feed_data['priceId'], 'stringValue': item['price'] }, { 'feedAttributeId': feed_data['dateId'], 'stringValue': item['date']", "with ID %s was added.' % feed_item['feedItemId'] else: raise Exception('No", "= [{'feedId': feed_data['feedId'], 'adGroupTargeting': { 'TargetingAdGroupId': item['adGroupId'] }, 'attributeValues': [", "ID %s was added with:' '\\tName attribute ID %s and", "Exception('No customer feeds were added.') # All set! 
We can", "the feed items using the # :campaign_targeting, :ad_group_targeting, or :keyword_targeting", "{ 'feedId': feed_data['feedId'], 'matchingFunction': matching_function, 'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER] } customer_feed_operation =", "Exception('No feeds were added') # Creating feed mapping to map", "AdGroupAdService.mutate \"\"\" __author__ = ('<EMAIL> (<NAME>)', '<EMAIL> (<NAME>)') # Import", "= [ { 'name': 'Mars', 'price': '$1234.56', 'date': '20140601 000000',", "'operator': 'IDENTITY', 'lhsOperand': [ { 'xsi_type': 'ConstantOperand', 'type': 'BOOLEAN', 'booleanValue':", "'operand': feed_item } for feed_item in feed_items] response = feed_item_service.mutate(feed_item_operations)", "__author__ = ('<EMAIL> (<NAME>)', '<EMAIL> (<NAME>)') # Import appropriate classes", "[ { 'feedAttributeId': feed_data['nameId'], 'stringValue': item['name'] }, { 'feedAttributeId': feed_data['priceId'],", "classes from the client library. from googleads import adwords #", "create a customizer feed. One feed per account can be", "% FEEDNAME, 'description1': 'Only {=%s.Price}' % FEEDNAME, 'description2': 'Offer ends", "operations = [{ 'operator': 'ADD', 'operand': { 'adGroupId': adgroup, 'ad':", "'\\tName attribute ID %s and price attribute ID %s and", "client.GetService('FeedItemService', version='v201406') feed_mapping_service = client.GetService( 'FeedMappingService', version='v201406') feed_service = client.GetService('FeedService',", "a matching function # that determines when to use this", "OR CONDITIONS OF ANY KIND, either express or implied. #", "print 'Customer feed with ID %s was added.' % feed['feedId']", "'operand': feed_mapping } response = feed_mapping_service.mutate([feed_mapping_operation]) if response and 'value'", "feed_item } for feed_item in feed_items] response = feed_item_service.mutate(feed_item_operations) if", "ads with customizations. 
text_ad = { 'xsi_type': 'TextAd', 'headline': 'Luxury", "{ 'feedAttributeId': feed_data['nameId'], 'stringValue': item['name'] }, { 'feedAttributeId': feed_data['priceId'], 'stringValue':", "the License is distributed on an \"AS IS\" BASIS, #", "and price attribute ID %s and date attribute' 'ID %s')", "'value' in response: for feed_item in response['value']: print 'Feed item", "different values, since they match different feed items. operations =", "version='v201406') feed_service = client.GetService('FeedService', version='v201406') # First, create a customizer", "}, { 'feedAttributeId': feed_data['dateId'], 'fieldId': PLACEHOLDER_FIELD_DATE } ] } feed_mapping_operation", "[ 'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE' ] FEEDNAME = 'INSERT_FEED_NAME_HERE' def main(client, adgroups):", "in {=countdown(%s.Date)}!' % FEEDNAME, 'url': 'http://www.example.com', 'displayUrl': 'www.example.com' } #", "# Import appropriate classes from the client library. from googleads", "and ID %s was added with:' '\\tName attribute ID %s", "'Only {=%s.Price}' % FEEDNAME, 'description2': 'Offer ends in {=countdown(%s.Date)}!' %", "'Offer ends in {=countdown(%s.Date)}!' % FEEDNAME, 'url': 'http://www.example.com', 'displayUrl': 'www.example.com'", "feed_data['priceId'], 'fieldId': PLACEHOLDER_FIELD_PRICE }, { 'feedAttributeId': feed_data['dateId'], 'fieldId': PLACEHOLDER_FIELD_DATE }", "they will show # different values, since they match different", "version='v201406') # First, create a customizer feed. One feed per", "an ad with ID \\'%s\\', type \\'%s\\', and status \\'%s\\'.'", "First, create a customizer feed. One feed per account can", "item with ID %s was added.' % feed_item['feedItemId'] else: raise", "be used for all # ads. customizer_feed = { 'name':", "'name': 'Venus', 'price': '$1450.00', 'date': '20140615 120000', 'adGroupId': adgroups[1] }", "# Initialize appropriate services. 
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201406') customer_feed_service =", "an ad customizer feed. Associates the feed with customer and", "PLACEHOLDER_FIELD_INTEGER = '1' PLACEHOLDER_FIELD_FLOAT = '2' PLACEHOLDER_FIELD_PRICE = '3' PLACEHOLDER_FIELD_DATE", "feed mappings were added.') # Now adding feed items --", "feed with ID %s was added.' % feed['feedId'] else: raise", "'feedId': feed_data['feedId'], 'matchingFunction': matching_function, 'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER] } customer_feed_operation = {", "for feed_item in response['value']: print 'Feed item with ID %s", "= response['value'][0] print 'Customer feed with ID %s was added.'", "'name': 'Date'} ] } feed_service_operation = { 'operator': 'ADD', 'operand':", "} feed_service_operation = { 'operator': 'ADD', 'operand': customizer_feed } response", "law or agreed to in writing, software # distributed under", "'BOOLEAN', 'booleanValue': 'true' } ] } customer_feed = { 'feedId':", "'true' just to associate this feed with # the customer.", "'20140601 000000', 'adGroupId': adgroups[0] }, { 'name': 'Venus', 'price': '$1450.00',", ":campaign_targeting, :ad_group_targeting, or :keyword_targeting attributes. matching_function = { 'operator': 'IDENTITY',", "[{ 'operator': 'ADD', 'operand': { 'adGroupId': adgroup, 'ad': text_ad }", "# Now adding feed items -- the values we'd like", "= [ 'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE' ] FEEDNAME = 'INSERT_FEED_NAME_HERE' def main(client,", "%s.') % (feed_mapping['feedMappingId'], feed_mapping['placeholderType'], feed_mapping['feedId']) else: raise Exception('No feed mappings", "response: feed = response['value'][0] print 'Customer feed with ID %s", "add the same ad to both ad groups. 
When they", "'STRING', 'name': 'Price'}, {'type': 'DATE_TIME', 'name': 'Date'} ] } feed_service_operation", "{ 'feedId': feed['id'], 'nameId': feed['attributes'][0]['id'], 'priceId': feed['attributes'][1]['id'], 'dateId': feed['attributes'][2]['id'] }", "% (feed['name'], feed['id'], feed_data['nameId'], feed_data['priceId'], feed_data['dateId']) else: raise Exception('No feeds", "} # We add the same ad to both ad", "feed_mapping = { 'placeholderType': PLACEHOLDER_AD_CUSTOMIZER, 'feedId': feed_data['feedId'], 'attributeFieldMappings': [ {", "{ 'operator': 'ADD', 'operand': customizer_feed } response = feed_service.mutate([feed_service_operation]) if", "used for all # ads. customizer_feed = { 'name': FEEDNAME,", "print ('\\tCreated an ad with ID \\'%s\\', type \\'%s\\', and", "ad in response['value']: print ('\\tCreated an ad with ID \\'%s\\',", "ads. customizer_feed = { 'name': FEEDNAME, 'attributes': [ {'type': 'STRING',", "client.GetService('AdGroupAdService', version='v201406') customer_feed_service = client.GetService( 'CustomerFeedService', version='v201406') feed_item_service = client.GetService('FeedItemService',", "for a list of all the placeholder types # and", "] } customer_feed = { 'feedId': feed_data['feedId'], 'matchingFunction': matching_function, 'placeholderTypes':", "is done within the feed items using the # :campaign_targeting,", "date attribute' 'ID %s') % (feed['name'], feed['id'], feed_data['nameId'], feed_data['priceId'], feed_data['dateId'])", "values, since they match different feed items. operations = [{", "feed. One feed per account can be used for all", "2014 Google Inc. All Rights Reserved. # # Licensed under", "the fields with customizer IDs. 
feed_mapping = { 'placeholderType': PLACEHOLDER_AD_CUSTOMIZER,", "'priceId': feed['attributes'][1]['id'], 'dateId': feed['attributes'][2]['id'] } print ('Feed with name \\'%s\\'", "'xsi_type': 'ConstantOperand', 'type': 'BOOLEAN', 'booleanValue': 'true' } ] } customer_feed", "% FEEDNAME, 'url': 'http://www.example.com', 'displayUrl': 'www.example.com' } # We add", "saved for feed' ' with ID %s.') % (feed_mapping['feedMappingId'], feed_mapping['placeholderType'],", "may obtain a copy of the License at # #", "per account can be used for all # ads. customizer_feed", "the Placeholder reference page for a list of all the", "'TextAd', 'headline': 'Luxury Cruise to {=%s.Name}' % FEEDNAME, 'description1': 'Only", "'adGroupId': adgroups[0] }, { 'name': 'Venus', 'price': '$1450.00', 'date': '20140615", "print ('Feed with name \\'%s\\' and ID %s was added", "'FeedMappingService', version='v201406') feed_service = client.GetService('FeedService', version='v201406') # First, create a", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "a customer (account-level) feed with a matching function # that", "} response = customer_feed_service.mutate([customer_feed_operation]) if response and 'value' in response:", "'feedId': feed_data['feedId'], 'attributeFieldMappings': [ { 'feedAttributeId': feed_data['nameId'], 'fieldId': PLACEHOLDER_FIELD_STRING },", "and adds an ad that uses the feed to populate", "may not use this file except in compliance with the", "response = customer_feed_service.mutate([customer_feed_operation]) if response and 'value' in response: feed", "{ 'xsi_type': 'TextAd', 'headline': 'Luxury Cruise to {=%s.Name}' % FEEDNAME,", "feed['id'], 'nameId': feed['attributes'][0]['id'], 'priceId': feed['attributes'][1]['id'], 'dateId': feed['attributes'][2]['id'] } print ('Feed", "uses the feed to populate dynamic data. Tags: CustomerFeedService.mutate, FeedItemService.mutate", "adgroups): # Initialize appropriate services. 
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201406') customer_feed_service", "= ad_group_ad_service.mutate(operations) print '===ad group ad service===' print response if", "{'type': 'DATE_TIME', 'name': 'Date'} ] } feed_service_operation = { 'operator':", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "License. \"\"\"Adds an ad customizer feed. Associates the feed with", "this file except in compliance with the License. # You", "Creating feed mapping to map the fields with customizer IDs.", "in response: feed = response['value'][0] print 'Customer feed with ID", "= '4' PLACEHOLDER_FIELD_STRING = '5' ADGROUPS = [ 'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE'", "all # ads. customizer_feed = { 'name': FEEDNAME, 'attributes': [", "set! We can now create ads with customizations. text_ad =", "under the License. \"\"\"Adds an ad customizer feed. Associates the", "'operand': customizer_feed } response = feed_service.mutate([feed_service_operation]) if response and 'value'", "} ] feed_items = [{'feedId': feed_data['feedId'], 'adGroupTargeting': { 'TargetingAdGroupId': item['adGroupId']", "response: for feed_item in response['value']: print 'Feed item with ID", "customizer_feed = { 'name': FEEDNAME, 'attributes': [ {'type': 'STRING', 'name':", "'operator': 'ADD', 'operand': feed_item } for feed_item in feed_items] response", "# # Copyright 2014 Google Inc. All Rights Reserved. #", "feed_data['dateId']) else: raise Exception('No feeds were added') # Creating feed", "ads were added.') if __name__ == '__main__': # Initialize client", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "now create ads with customizations. 
text_ad = { 'xsi_type': 'TextAd',", "PLACEHOLDER_AD_CUSTOMIZER = '10' PLACEHOLDER_FIELD_INTEGER = '1' PLACEHOLDER_FIELD_FLOAT = '2' PLACEHOLDER_FIELD_PRICE", "'value' in response: feed = response['value'][0] feed_data = { 'feedId':", "response: feed = response['value'][0] feed_data = { 'feedId': feed['id'], 'nameId':", "# # Licensed under the Apache License, Version 2.0 (the", "permissions and # limitations under the License. \"\"\"Adds an ad", "\"\"\" __author__ = ('<EMAIL> (<NAME>)', '<EMAIL> (<NAME>)') # Import appropriate", "'1' PLACEHOLDER_FIELD_FLOAT = '2' PLACEHOLDER_FIELD_PRICE = '3' PLACEHOLDER_FIELD_DATE = '4'", "types # and fields: # https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER = '10' PLACEHOLDER_FIELD_INTEGER", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "with name \\'%s\\' and ID %s was added with:' '\\tName", "items -- the values we'd like to place. items_data =", "adgroups[0] }, { 'name': 'Venus', 'price': '$1450.00', 'date': '20140615 120000',", "'dateId': feed['attributes'][2]['id'] } print ('Feed with name \\'%s\\' and ID", "feed_data['priceId'], feed_data['dateId']) else: raise Exception('No feeds were added') # Creating", "customer_feed = { 'feedId': feed_data['feedId'], 'matchingFunction': matching_function, 'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER] }", "Now adding feed items -- the values we'd like to", "this feed with # the customer. The targeting is done", "'http://www.example.com', 'displayUrl': 'www.example.com' } # We add the same ad", "'name': FEEDNAME, 'attributes': [ {'type': 'STRING', 'name': 'Name'}, {'type': 'STRING',", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "for all # ads. 
customizer_feed = { 'name': FEEDNAME, 'attributes':", "'INSERT_ADGROUP_ID_HERE' ] FEEDNAME = 'INSERT_FEED_NAME_HERE' def main(client, adgroups): # Initialize", "fields: # https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER = '10' PLACEHOLDER_FIELD_INTEGER = '1' PLACEHOLDER_FIELD_FLOAT", "in response: feed = response['value'][0] feed_data = { 'feedId': feed['id'],", "= [{ 'operator': 'ADD', 'operand': { 'adGroupId': adgroup, 'ad': text_ad", "Exception('No feed items were added.') # Finally, creating a customer", "map the fields with customizer IDs. feed_mapping = { 'placeholderType':", "ad_group_ad_service.mutate(operations) print '===ad group ad service===' print response if response", "values we'd like to place. items_data = [ { 'name':", "response['value'][0] print ('Feed mapping with ID %s and placeholder type", "just to associate this feed with # the customer. The", "response and 'value' in response: feed_mapping = response['value'][0] print ('Feed", "# that determines when to use this feed. For this", "we'd like to place. items_data = [ { 'name': 'Mars',", "function that is always 'true' just to associate this feed", "}, { 'name': 'Venus', 'price': '$1450.00', 'date': '20140615 120000', 'adGroupId':", "Import appropriate classes from the client library. from googleads import", "Google Inc. All Rights Reserved. # # Licensed under the", "% (feed_mapping['feedMappingId'], feed_mapping['placeholderType'], feed_mapping['feedId']) else: raise Exception('No feed mappings were", "Finally, creating a customer (account-level) feed with a matching function", "(ad['ad']['id'], ad['ad']['Ad.Type'], ad['status'])) else: raise Exception('No ads were added.') if", "%s was added with:' '\\tName attribute ID %s and price", "mapping to map the fields with customizer IDs. 
feed_mapping =", "will show # different values, since they match different feed", "'Price'}, {'type': 'DATE_TIME', 'name': 'Date'} ] } feed_service_operation = {", "'20140615 120000', 'adGroupId': adgroups[1] } ] feed_items = [{'feedId': feed_data['feedId'],", "'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE' ] FEEDNAME = 'INSERT_FEED_NAME_HERE' def main(client, adgroups): #", "= '2' PLACEHOLDER_FIELD_PRICE = '3' PLACEHOLDER_FIELD_DATE = '4' PLACEHOLDER_FIELD_STRING =", "adwords # See the Placeholder reference page for a list", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "FeedMappingService.mutate, FeedService.mutate Tags: AdGroupAdService.mutate \"\"\" __author__ = ('<EMAIL> (<NAME>)', '<EMAIL>", "Placeholder reference page for a list of all the placeholder", "'fieldId': PLACEHOLDER_FIELD_STRING }, { 'feedAttributeId': feed_data['priceId'], 'fieldId': PLACEHOLDER_FIELD_PRICE }, {", "('\\tCreated an ad with ID \\'%s\\', type \\'%s\\', and status", "'Feed item with ID %s was added.' % feed_item['feedItemId'] else:", "'true' } ] } customer_feed = { 'feedId': feed_data['feedId'], 'matchingFunction':", "(account-level) feed with a matching function # that determines when", "with a matching function # that determines when to use", "with customizations. text_ad = { 'xsi_type': 'TextAd', 'headline': 'Luxury Cruise", "Tags: FeedMappingService.mutate, FeedService.mutate Tags: AdGroupAdService.mutate \"\"\" __author__ = ('<EMAIL> (<NAME>)',", "customizations. text_ad = { 'xsi_type': 'TextAd', 'headline': 'Luxury Cruise to", "within the feed items using the # :campaign_targeting, :ad_group_targeting, or", "is always 'true' just to associate this feed with #", "or implied. # See the License for the specific language", "'feedAttributeId': feed_data['nameId'], 'fieldId': PLACEHOLDER_FIELD_STRING }, { 'feedAttributeId': feed_data['priceId'], 'fieldId': PLACEHOLDER_FIELD_PRICE", "Rights Reserved. 
# # Licensed under the Apache License, Version", "} print ('Feed with name \\'%s\\' and ID %s was", "%s was added.' % feed_item['feedItemId'] else: raise Exception('No feed items", "in response: for ad in response['value']: print ('\\tCreated an ad", "'date': '20140615 120000', 'adGroupId': adgroups[1] } ] feed_items = [{'feedId':", "ID %s was added.' % feed_item['feedItemId'] else: raise Exception('No feed", "% feed['feedId'] else: raise Exception('No customer feeds were added.') #", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "items were added.') # Finally, creating a customer (account-level) feed", "'placeholderType': PLACEHOLDER_AD_CUSTOMIZER, 'feedId': feed_data['feedId'], 'attributeFieldMappings': [ { 'feedAttributeId': feed_data['nameId'], 'fieldId':", "} ] } feed_mapping_operation = { 'operator': 'ADD', 'operand': feed_mapping", "was added with:' '\\tName attribute ID %s and price attribute", "import adwords # See the Placeholder reference page for a", "FEEDNAME = 'INSERT_FEED_NAME_HERE' def main(client, adgroups): # Initialize appropriate services.", "attribute' 'ID %s') % (feed['name'], feed['id'], feed_data['nameId'], feed_data['priceId'], feed_data['dateId']) else:", "feed_data['nameId'], feed_data['priceId'], feed_data['dateId']) else: raise Exception('No feeds were added') #", "'feedAttributeId': feed_data['priceId'], 'fieldId': PLACEHOLDER_FIELD_PRICE }, { 'feedAttributeId': feed_data['dateId'], 'fieldId': PLACEHOLDER_FIELD_DATE", "response: for ad in response['value']: print ('\\tCreated an ad with", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "} customer_feed_operation = { 'operator': 'ADD', 'operand': customer_feed } response", "{ 'feedAttributeId': feed_data['priceId'], 'stringValue': item['price'] }, { 'feedAttributeId': feed_data['dateId'], 'stringValue':", "customer_feed_service.mutate([customer_feed_operation]) 
if response and 'value' in response: feed = response['value'][0]", "with ID %s was added.' % feed['feedId'] else: raise Exception('No", "# Copyright 2014 Google Inc. All Rights Reserved. # #", "type %s was saved for feed' ' with ID %s.')", "feed_mapping = response['value'][0] print ('Feed mapping with ID %s and", "feed_data['feedId'], 'attributeFieldMappings': [ { 'feedAttributeId': feed_data['nameId'], 'fieldId': PLACEHOLDER_FIELD_STRING }, {", "{ 'feedAttributeId': feed_data['dateId'], 'stringValue': item['date'] } ]} for item in", "function # that determines when to use this feed. For", "(the \"License\"); # you may not use this file except", "Copyright 2014 Google Inc. All Rights Reserved. # # Licensed", "# you may not use this file except in compliance", "feed_service = client.GetService('FeedService', version='v201406') # First, create a customizer feed.", "{ 'operator': 'ADD', 'operand': feed_mapping } response = feed_mapping_service.mutate([feed_mapping_operation]) if", "service===' print response if response and 'value' in response: for", "'Luxury Cruise to {=%s.Name}' % FEEDNAME, 'description1': 'Only {=%s.Price}' %", "# :campaign_targeting, :ad_group_targeting, or :keyword_targeting attributes. matching_function = { 'operator':", "were added.') # All set! We can now create ads", "(<NAME>)') # Import appropriate classes from the client library. from", "'attributeValues': [ { 'feedAttributeId': feed_data['nameId'], 'stringValue': item['name'] }, { 'feedAttributeId':", "ID %s.') % (feed_mapping['feedMappingId'], feed_mapping['placeholderType'], feed_mapping['feedId']) else: raise Exception('No feed", "the License. \"\"\"Adds an ad customizer feed. Associates the feed", "library. 
from googleads import adwords # See the Placeholder reference", "'Venus', 'price': '$1450.00', 'date': '20140615 120000', 'adGroupId': adgroups[1] } ]", "were added') # Creating feed mapping to map the fields", "item['adGroupId'] }, 'attributeValues': [ { 'feedAttributeId': feed_data['nameId'], 'stringValue': item['name'] },", "if response and 'value' in response: feed = response['value'][0] feed_data", "% (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status'])) else: raise Exception('No ads were added.')", "feed per account can be used for all # ads.", "feed_item_service.mutate(feed_item_operations) if response and 'value' in response: for feed_item in", "000000', 'adGroupId': adgroups[0] }, { 'name': 'Venus', 'price': '$1450.00', 'date':", "# # Unless required by applicable law or agreed to", "%s was saved for feed' ' with ID %s.') %", "'feedAttributeId': feed_data['dateId'], 'stringValue': item['date'] } ]} for item in items_data]", "feed['attributes'][1]['id'], 'dateId': feed['attributes'][2]['id'] } print ('Feed with name \\'%s\\' and", "and # limitations under the License. \"\"\"Adds an ad customizer", "% feed_item['feedItemId'] else: raise Exception('No feed items were added.') #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "= { 'name': FEEDNAME, 'attributes': [ {'type': 'STRING', 'name': 'Name'},", "'4' PLACEHOLDER_FIELD_STRING = '5' ADGROUPS = [ 'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE' ]", "was saved for feed' ' with ID %s.') % (feed_mapping['feedMappingId'],", "'adGroupTargeting': { 'TargetingAdGroupId': item['adGroupId'] }, 'attributeValues': [ { 'feedAttributeId': feed_data['nameId'],", "Version 2.0 (the \"License\"); # you may not use this", "{ 'feedAttributeId': feed_data['priceId'], 'fieldId': PLACEHOLDER_FIELD_PRICE }, { 'feedAttributeId': feed_data['dateId'], 'fieldId':", "data. 
Tags: CustomerFeedService.mutate, FeedItemService.mutate Tags: FeedMappingService.mutate, FeedService.mutate Tags: AdGroupAdService.mutate \"\"\"", "PLACEHOLDER_FIELD_STRING = '5' ADGROUPS = [ 'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE' ] FEEDNAME", "were added.') # Finally, creating a customer (account-level) feed with", "'headline': 'Luxury Cruise to {=%s.Name}' % FEEDNAME, 'description1': 'Only {=%s.Price}'", "\"\"\"Adds an ad customizer feed. Associates the feed with customer", "feeds were added.') # All set! We can now create", "'matchingFunction': matching_function, 'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER] } customer_feed_operation = { 'operator': 'ADD',", "when to use this feed. For this case we use", "customer and adds an ad that uses the feed to", "like to place. items_data = [ { 'name': 'Mars', 'price':", "with customizer IDs. feed_mapping = { 'placeholderType': PLACEHOLDER_AD_CUSTOMIZER, 'feedId': feed_data['feedId'],", "'lhsOperand': [ { 'xsi_type': 'ConstantOperand', 'type': 'BOOLEAN', 'booleanValue': 'true' }", "they match different feed items. operations = [{ 'operator': 'ADD',", "client.GetService('FeedService', version='v201406') # First, create a customizer feed. One feed", "feed with customer and adds an ad that uses the", "placeholder type %s was saved for feed' ' with ID", "implied. # See the License for the specific language governing", "populate dynamic data. Tags: CustomerFeedService.mutate, FeedItemService.mutate Tags: FeedMappingService.mutate, FeedService.mutate Tags:", "feed_data['priceId'], 'stringValue': item['price'] }, { 'feedAttributeId': feed_data['dateId'], 'stringValue': item['date'] }", "for adgroup in adgroups] print operations response = ad_group_ad_service.mutate(operations) print", "else: raise Exception('No feed mappings were added.') # Now adding", "for item in items_data] feed_item_operations = [{ 'operator': 'ADD', 'operand':", "feed mapping to map the fields with customizer IDs. 
feed_mapping", "under the Apache License, Version 2.0 (the \"License\"); # you", "governing permissions and # limitations under the License. \"\"\"Adds an", "} } for adgroup in adgroups] print operations response =", "ID %s and date attribute' 'ID %s') % (feed['name'], feed['id'],", "== '__main__': # Initialize client object. adwords_client = adwords.AdWordsClient.LoadFromStorage() main(adwords_client,", "from googleads import adwords # See the Placeholder reference page", "= '5' ADGROUPS = [ 'INSERT_ADGROUP_ID_HERE', 'INSERT_ADGROUP_ID_HERE' ] FEEDNAME =", "by applicable law or agreed to in writing, software #", "PLACEHOLDER_FIELD_STRING }, { 'feedAttributeId': feed_data['priceId'], 'fieldId': PLACEHOLDER_FIELD_PRICE }, { 'feedAttributeId':", "'feedId': feed['id'], 'nameId': feed['attributes'][0]['id'], 'priceId': feed['attributes'][1]['id'], 'dateId': feed['attributes'][2]['id'] } print", "customizer IDs. feed_mapping = { 'placeholderType': PLACEHOLDER_AD_CUSTOMIZER, 'feedId': feed_data['feedId'], 'attributeFieldMappings':", "response = feed_mapping_service.mutate([feed_mapping_operation]) if response and 'value' in response: feed_mapping", "'TargetingAdGroupId': item['adGroupId'] }, 'attributeValues': [ { 'feedAttributeId': feed_data['nameId'], 'stringValue': item['name']", "name \\'%s\\' and ID %s was added with:' '\\tName attribute", "(<NAME>)', '<EMAIL> (<NAME>)') # Import appropriate classes from the client", "with ID %s.') % (feed_mapping['feedMappingId'], feed_mapping['placeholderType'], feed_mapping['feedId']) else: raise Exception('No", "= client.GetService('FeedService', version='v201406') # First, create a customizer feed. One", "{ 'placeholderType': PLACEHOLDER_AD_CUSTOMIZER, 'feedId': feed_data['feedId'], 'attributeFieldMappings': [ { 'feedAttributeId': feed_data['nameId'],", "feed items -- the values we'd like to place. items_data", "\"IDENTITY\" # matching function that is always 'true' just to", "\\'%s\\', type \\'%s\\', and status \\'%s\\'.' 
% (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status']))", "[PLACEHOLDER_AD_CUSTOMIZER] } customer_feed_operation = { 'operator': 'ADD', 'operand': customer_feed }", "else: raise Exception('No ads were added.') if __name__ == '__main__':", "attribute ID %s and date attribute' 'ID %s') % (feed['name'],", "'ADD', 'operand': customer_feed } response = customer_feed_service.mutate([customer_feed_operation]) if response and", "feed['id'], feed_data['nameId'], feed_data['priceId'], feed_data['dateId']) else: raise Exception('No feeds were added')", "{'type': 'STRING', 'name': 'Name'}, {'type': 'STRING', 'name': 'Price'}, {'type': 'DATE_TIME',", "attributes. matching_function = { 'operator': 'IDENTITY', 'lhsOperand': [ { 'xsi_type':", "list of all the placeholder types # and fields: #", "to populate dynamic data. Tags: CustomerFeedService.mutate, FeedItemService.mutate Tags: FeedMappingService.mutate, FeedService.mutate", "= { 'operator': 'IDENTITY', 'lhsOperand': [ { 'xsi_type': 'ConstantOperand', 'type':", "item['date'] } ]} for item in items_data] feed_item_operations = [{", "feed items were added.') # Finally, creating a customer (account-level)", "in adgroups] print operations response = ad_group_ad_service.mutate(operations) print '===ad group", "item['name'] }, { 'feedAttributeId': feed_data['priceId'], 'stringValue': item['price'] }, { 'feedAttributeId':", "'operator': 'ADD', 'operand': customizer_feed } response = feed_service.mutate([feed_service_operation]) if response", "120000', 'adGroupId': adgroups[1] } ] feed_items = [{'feedId': feed_data['feedId'], 'adGroupTargeting':", "with # the customer. The targeting is done within the", "feed_service.mutate([feed_service_operation]) if response and 'value' in response: feed = response['value'][0]", "and fields: # https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER = '10' PLACEHOLDER_FIELD_INTEGER = '1'", "was added.' 
% feed['feedId'] else: raise Exception('No customer feeds were", "response and 'value' in response: feed = response['value'][0] feed_data =", "feed. Associates the feed with customer and adds an ad", "} response = feed_mapping_service.mutate([feed_mapping_operation]) if response and 'value' in response:", "to both ad groups. When they serve, they will show", "of all the placeholder types # and fields: # https://developers.google.com/adwords/api/docs/appendix/placeholders", "# Creating feed mapping to map the fields with customizer", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "'booleanValue': 'true' } ] } customer_feed = { 'feedId': feed_data['feedId'],", "Unless required by applicable law or agreed to in writing,", "version='v201406') feed_item_service = client.GetService('FeedItemService', version='v201406') feed_mapping_service = client.GetService( 'FeedMappingService', version='v201406')", "('Feed mapping with ID %s and placeholder type %s was", "print response if response and 'value' in response: for ad", "feed with a matching function # that determines when to", "IDs. feed_mapping = { 'placeholderType': PLACEHOLDER_AD_CUSTOMIZER, 'feedId': feed_data['feedId'], 'attributeFieldMappings': [", "matching function # that determines when to use this feed.", "# and fields: # https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER = '10' PLACEHOLDER_FIELD_INTEGER =", "'attributeFieldMappings': [ { 'feedAttributeId': feed_data['nameId'], 'fieldId': PLACEHOLDER_FIELD_STRING }, { 'feedAttributeId':", "to use this feed. 
For this case we use the", "'name': 'Mars', 'price': '$1234.56', 'date': '20140601 000000', 'adGroupId': adgroups[0] },", "the specific language governing permissions and # limitations under the", "price attribute ID %s and date attribute' 'ID %s') %", "an ad that uses the feed to populate dynamic data.", "matching_function = { 'operator': 'IDENTITY', 'lhsOperand': [ { 'xsi_type': 'ConstantOperand',", "version='v201406') feed_mapping_service = client.GetService( 'FeedMappingService', version='v201406') feed_service = client.GetService('FeedService', version='v201406')", "applicable law or agreed to in writing, software # distributed", "'www.example.com' } # We add the same ad to both", "feed_service_operation = { 'operator': 'ADD', 'operand': customizer_feed } response =", "all the placeholder types # and fields: # https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER", "different feed items. operations = [{ 'operator': 'ADD', 'operand': {", "'Mars', 'price': '$1234.56', 'date': '20140601 000000', 'adGroupId': adgroups[0] }, {", "adding feed items -- the values we'd like to place.", "[ { 'feedAttributeId': feed_data['nameId'], 'fieldId': PLACEHOLDER_FIELD_STRING }, { 'feedAttributeId': feed_data['priceId'],", "'ADD', 'operand': feed_item } for feed_item in feed_items] response =", "FEEDNAME, 'url': 'http://www.example.com', 'displayUrl': 'www.example.com' } # We add the", "'ad': text_ad } } for adgroup in adgroups] print operations", "limitations under the License. \"\"\"Adds an ad customizer feed. 
Associates", "= '3' PLACEHOLDER_FIELD_DATE = '4' PLACEHOLDER_FIELD_STRING = '5' ADGROUPS =", "in writing, software # distributed under the License is distributed", "feed_items = [{'feedId': feed_data['feedId'], 'adGroupTargeting': { 'TargetingAdGroupId': item['adGroupId'] }, 'attributeValues':", "ad_group_ad_service = client.GetService('AdGroupAdService', version='v201406') customer_feed_service = client.GetService( 'CustomerFeedService', version='v201406') feed_item_service", "text_ad = { 'xsi_type': 'TextAd', 'headline': 'Luxury Cruise to {=%s.Name}'", "'name': 'Name'}, {'type': 'STRING', 'name': 'Price'}, {'type': 'DATE_TIME', 'name': 'Date'}", "response = ad_group_ad_service.mutate(operations) print '===ad group ad service===' print response", "When they serve, they will show # different values, since", "in items_data] feed_item_operations = [{ 'operator': 'ADD', 'operand': feed_item }", "items_data] feed_item_operations = [{ 'operator': 'ADD', 'operand': feed_item } for", "{ 'feedAttributeId': feed_data['dateId'], 'fieldId': PLACEHOLDER_FIELD_DATE } ] } feed_mapping_operation =", "} feed_mapping_operation = { 'operator': 'ADD', 'operand': feed_mapping } response", "groups. When they serve, they will show # different values,", "feed_data['nameId'], 'fieldId': PLACEHOLDER_FIELD_STRING }, { 'feedAttributeId': feed_data['priceId'], 'fieldId': PLACEHOLDER_FIELD_PRICE },", "'operator': 'ADD', 'operand': feed_mapping } response = feed_mapping_service.mutate([feed_mapping_operation]) if response", "operations response = ad_group_ad_service.mutate(operations) print '===ad group ad service===' print", "customer. The targeting is done within the feed items using", "this case we use the \"IDENTITY\" # matching function that", "to {=%s.Name}' % FEEDNAME, 'description1': 'Only {=%s.Price}' % FEEDNAME, 'description2':", "that uses the feed to populate dynamic data. Tags: CustomerFeedService.mutate,", "\\'%s\\', and status \\'%s\\'.' 
% (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status'])) else: raise", "] feed_items = [{'feedId': feed_data['feedId'], 'adGroupTargeting': { 'TargetingAdGroupId': item['adGroupId'] },", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "'date': '20140601 000000', 'adGroupId': adgroups[0] }, { 'name': 'Venus', 'price':", "License, Version 2.0 (the \"License\"); # you may not use", "mappings were added.') # Now adding feed items -- the", "customer_feed_operation = { 'operator': 'ADD', 'operand': customer_feed } response =", "# You may obtain a copy of the License at", "response = feed_item_service.mutate(feed_item_operations) if response and 'value' in response: for", "feed items. operations = [{ 'operator': 'ADD', 'operand': { 'adGroupId':", "'feedAttributeId': feed_data['dateId'], 'fieldId': PLACEHOLDER_FIELD_DATE } ] } feed_mapping_operation = {", "and status \\'%s\\'.' % (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status'])) else: raise Exception('No", "and placeholder type %s was saved for feed' ' with", "} ] } customer_feed = { 'feedId': feed_data['feedId'], 'matchingFunction': matching_function,", "(feed_mapping['feedMappingId'], feed_mapping['placeholderType'], feed_mapping['feedId']) else: raise Exception('No feed mappings were added.')", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "'adGroupId': adgroups[1] } ] feed_items = [{'feedId': feed_data['feedId'], 'adGroupTargeting': {", "= { 'feedId': feed['id'], 'nameId': feed['attributes'][0]['id'], 'priceId': feed['attributes'][1]['id'], 'dateId': feed['attributes'][2]['id']", "= response['value'][0] print ('Feed mapping with ID %s and placeholder", "adgroup in adgroups] print operations response = ad_group_ad_service.mutate(operations) print '===ad", "serve, they will show # different values, since they match", "added.') if __name__ == '__main__': # Initialize client object. 
adwords_client", "feed_items] response = feed_item_service.mutate(feed_item_operations) if response and 'value' in response:", "'fieldId': PLACEHOLDER_FIELD_PRICE }, { 'feedAttributeId': feed_data['dateId'], 'fieldId': PLACEHOLDER_FIELD_DATE } ]", "in response['value']: print ('\\tCreated an ad with ID \\'%s\\', type", "the License for the specific language governing permissions and #", "done within the feed items using the # :campaign_targeting, :ad_group_targeting,", "feed['attributes'][2]['id'] } print ('Feed with name \\'%s\\' and ID %s", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "FeedItemService.mutate Tags: FeedMappingService.mutate, FeedService.mutate Tags: AdGroupAdService.mutate \"\"\" __author__ = ('<EMAIL>", "for feed' ' with ID %s.') % (feed_mapping['feedMappingId'], feed_mapping['placeholderType'], feed_mapping['feedId'])", "ad to both ad groups. When they serve, they will", "if __name__ == '__main__': # Initialize client object. adwords_client =", "the customer. The targeting is done within the feed items", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "FEEDNAME, 'attributes': [ {'type': 'STRING', 'name': 'Name'}, {'type': 'STRING', 'name':", "= 'INSERT_FEED_NAME_HERE' def main(client, adgroups): # Initialize appropriate services. 
ad_group_ad_service", "= [{ 'operator': 'ADD', 'operand': feed_item } for feed_item in", "} for adgroup in adgroups] print operations response = ad_group_ad_service.mutate(operations)", "'price': '$1450.00', 'date': '20140615 120000', 'adGroupId': adgroups[1] } ] feed_items", "'description1': 'Only {=%s.Price}' % FEEDNAME, 'description2': 'Offer ends in {=countdown(%s.Date)}!'", "page for a list of all the placeholder types #", "adgroups[1] } ] feed_items = [{'feedId': feed_data['feedId'], 'adGroupTargeting': { 'TargetingAdGroupId':", "FEEDNAME, 'description1': 'Only {=%s.Price}' % FEEDNAME, 'description2': 'Offer ends in", "feed_data = { 'feedId': feed['id'], 'nameId': feed['attributes'][0]['id'], 'priceId': feed['attributes'][1]['id'], 'dateId':", "from the client library. from googleads import adwords # See", "__name__ == '__main__': # Initialize client object. adwords_client = adwords.AdWordsClient.LoadFromStorage()", "client.GetService( 'FeedMappingService', version='v201406') feed_service = client.GetService('FeedService', version='v201406') # First, create", "'name': 'Price'}, {'type': 'DATE_TIME', 'name': 'Date'} ] } feed_service_operation =", "response['value'][0] feed_data = { 'feedId': feed['id'], 'nameId': feed['attributes'][0]['id'], 'priceId': feed['attributes'][1]['id'],", "= { 'operator': 'ADD', 'operand': feed_mapping } response = feed_mapping_service.mutate([feed_mapping_operation])", "PLACEHOLDER_FIELD_PRICE }, { 'feedAttributeId': feed_data['dateId'], 'fieldId': PLACEHOLDER_FIELD_DATE } ] }", "status \\'%s\\'.' 
% (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status'])) else: raise Exception('No ads", "feed_mapping } response = feed_mapping_service.mutate([feed_mapping_operation]) if response and 'value' in", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "# We add the same ad to both ad groups.", "= '1' PLACEHOLDER_FIELD_FLOAT = '2' PLACEHOLDER_FIELD_PRICE = '3' PLACEHOLDER_FIELD_DATE =", "create ads with customizations. text_ad = { 'xsi_type': 'TextAd', 'headline':", "'ADD', 'operand': feed_mapping } response = feed_mapping_service.mutate([feed_mapping_operation]) if response and", "item['price'] }, { 'feedAttributeId': feed_data['dateId'], 'stringValue': item['date'] } ]} for", "with ID \\'%s\\', type \\'%s\\', and status \\'%s\\'.' % (ad['ad']['id'],", "feed_data['dateId'], 'stringValue': item['date'] } ]} for item in items_data] feed_item_operations", "ad with ID \\'%s\\', type \\'%s\\', and status \\'%s\\'.' %", "%s') % (feed['name'], feed['id'], feed_data['nameId'], feed_data['priceId'], feed_data['dateId']) else: raise Exception('No", "'value' in response: feed_mapping = response['value'][0] print ('Feed mapping with", "and 'value' in response: for feed_item in response['value']: print 'Feed", "same ad to both ad groups. When they serve, they", "case we use the \"IDENTITY\" # matching function that is", "'Customer feed with ID %s was added.' % feed['feedId'] else:", "% FEEDNAME, 'description2': 'Offer ends in {=countdown(%s.Date)}!' % FEEDNAME, 'url':", "# different values, since they match different feed items. operations", "'price': '$1234.56', 'date': '20140601 000000', 'adGroupId': adgroups[0] }, { 'name':", "was added.' 
% feed_item['feedItemId'] else: raise Exception('No feed items were", "{ 'operator': 'ADD', 'operand': customer_feed } response = customer_feed_service.mutate([customer_feed_operation]) if", "\"License\"); # you may not use this file except in", "feed['feedId'] else: raise Exception('No customer feeds were added.') # All", "= { 'feedId': feed_data['feedId'], 'matchingFunction': matching_function, 'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER] } customer_feed_operation", "client.GetService( 'CustomerFeedService', version='v201406') feed_item_service = client.GetService('FeedItemService', version='v201406') feed_mapping_service = client.GetService(", "the same ad to both ad groups. When they serve,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "{ 'TargetingAdGroupId': item['adGroupId'] }, 'attributeValues': [ { 'feedAttributeId': feed_data['nameId'], 'stringValue':", "'IDENTITY', 'lhsOperand': [ { 'xsi_type': 'ConstantOperand', 'type': 'BOOLEAN', 'booleanValue': 'true'", "= { 'xsi_type': 'TextAd', 'headline': 'Luxury Cruise to {=%s.Name}' %", "matching_function, 'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER] } customer_feed_operation = { 'operator': 'ADD', 'operand':", "matching function that is always 'true' just to associate this", "feed with # the customer. The targeting is done within", "}, { 'feedAttributeId': feed_data['priceId'], 'stringValue': item['price'] }, { 'feedAttributeId': feed_data['dateId'],", "#!/usr/bin/python # # Copyright 2014 Google Inc. All Rights Reserved.", "determines when to use this feed. For this case we", "raise Exception('No customer feeds were added.') # All set! 
We", "'stringValue': item['price'] }, { 'feedAttributeId': feed_data['dateId'], 'stringValue': item['date'] } ]}", "# distributed under the License is distributed on an \"AS", "{ 'adGroupId': adgroup, 'ad': text_ad } } for adgroup in", "# Unless required by applicable law or agreed to in", "mapping with ID %s and placeholder type %s was saved", "= { 'operator': 'ADD', 'operand': customer_feed } response = customer_feed_service.mutate([customer_feed_operation])", "show # different values, since they match different feed items.", "'fieldId': PLACEHOLDER_FIELD_DATE } ] } feed_mapping_operation = { 'operator': 'ADD',", "customizer feed. Associates the feed with customer and adds an", "in response: for feed_item in response['value']: print 'Feed item with", "creating a customer (account-level) feed with a matching function #", "ad['ad']['Ad.Type'], ad['status'])) else: raise Exception('No ads were added.') if __name__", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "'description2': 'Offer ends in {=countdown(%s.Date)}!' % FEEDNAME, 'url': 'http://www.example.com', 'displayUrl':", "feed_data['feedId'], 'adGroupTargeting': { 'TargetingAdGroupId': item['adGroupId'] }, 'attributeValues': [ { 'feedAttributeId':", "were added.') # Now adding feed items -- the values", "and 'value' in response: feed = response['value'][0] print 'Customer feed", "feed = response['value'][0] print 'Customer feed with ID %s was", "'value' in response: for ad in response['value']: print ('\\tCreated an", "else: raise Exception('No customer feeds were added.') # All set!", "You may obtain a copy of the License at #", "services. ad_group_ad_service = client.GetService('AdGroupAdService', version='v201406') customer_feed_service = client.GetService( 'CustomerFeedService', version='v201406')", "'operand': { 'adGroupId': adgroup, 'ad': text_ad } } for adgroup", "ID %s was added.' 
% feed['feedId'] else: raise Exception('No customer", "# https://developers.google.com/adwords/api/docs/appendix/placeholders PLACEHOLDER_AD_CUSTOMIZER = '10' PLACEHOLDER_FIELD_INTEGER = '1' PLACEHOLDER_FIELD_FLOAT =", "[ { 'name': 'Mars', 'price': '$1234.56', 'date': '20140601 000000', 'adGroupId':", "}, { 'feedAttributeId': feed_data['priceId'], 'fieldId': PLACEHOLDER_FIELD_PRICE }, { 'feedAttributeId': feed_data['dateId'],", "response and 'value' in response: feed = response['value'][0] print 'Customer", "# matching function that is always 'true' just to associate", "since they match different feed items. operations = [{ 'operator':", "the Apache License, Version 2.0 (the \"License\"); # you may", "= { 'placeholderType': PLACEHOLDER_AD_CUSTOMIZER, 'feedId': feed_data['feedId'], 'attributeFieldMappings': [ { 'feedAttributeId':", "] FEEDNAME = 'INSERT_FEED_NAME_HERE' def main(client, adgroups): # Initialize appropriate", "were added.') if __name__ == '__main__': # Initialize client object.", "feed_item_service = client.GetService('FeedItemService', version='v201406') feed_mapping_service = client.GetService( 'FeedMappingService', version='v201406') feed_service", "feed_data['feedId'], 'matchingFunction': matching_function, 'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER] } customer_feed_operation = { 'operator':" ]
[ "load_data() myMinHasher = MinHash(3) df = myMinHasher._sparse_vectorize(df, 'name') df =", "= MinHash(1000) res = myMinHasher.fit_predict(df, 'name') assert len(res) == 1727", "'name') assert res.columns.tolist() == ['row_number_1', 'row_number_2', 'name_1', 'name_2', 'jaccard_sim'] assert", "min(res) >= 0 assert min(res) <= myMinHasher.max_token_value def test__create_minhash(): n_hashes", "MinHash(3) df = myMinHasher._sparse_vectorize(df, 'name') df = myMinHasher._create_minhash_signatures(df) for col", "assert min(res) <= myMinHasher.max_token_value def test__create_minhash(): n_hashes = 10 myMinHasher", "len(res) == n_hashes def test__create_minhash_signatures(): df = load_data() myMinHasher =", "assert res.dtype == 'int64' assert min(res) >= 0 assert min(res)", "res = myMinHasher.fit_predict(df, 'name') assert res.columns.tolist() == ['row_number_1', 'row_number_2', 'name_1',", "MinHash(1000) res = myMinHasher.fit_predict(df, 'name') assert len(res) == 1727 res['jaccard_real']", "65, 66, 67, 118, 150, 266] res = myMinHasher._create_minhash(doc) assert", "assert len(res) == n_hashes def test__create_minhash_signatures(): df = load_data() myMinHasher", "myMinHasher._sparse_vectorize(df, 'name') assert res.columns.tolist() == ['name', 'sparse_vector'] assert res['sparse_vector'].dtype ==", "['name', 'sparse_vector'] assert res['sparse_vector'].dtype == 'object' def test__create_hashing_parameters(): n_hashes =", "== n_hashes assert res.dtype == 'int64' assert min(res) >= 0", "from pyminhash.datasets import load_data def test__sparse_vector(): df = load_data() myMinHasher", "df = load_data() myMinHasher = MinHash(10) res = myMinHasher.fit_predict(df, 'name')", "test__create_hashing_parameters(): n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes) res = myMinHasher._create_hashing_parameters()", "load_data() myMinHasher = MinHash(10) res = myMinHasher.fit_predict(df, 'name') assert res.columns.tolist()", "df = load_data() myMinHasher = MinHash(1000) res 
= myMinHasher.fit_predict(df, 'name')", "myMinHasher = MinHash(n_hash_tables=n_hashes) doc = [59, 65, 66, 67, 118,", "'name') assert len(res) == 1727 res['jaccard_real'] = res.apply(lambda row: jaccard(row['name_1'],", "assert res.columns.tolist() == ['row_number_1', 'row_number_2', 'name_1', 'name_2', 'jaccard_sim'] assert res['jaccard_sim'].dtype", "assert res.columns.tolist() == ['name', 'sparse_vector'] assert res['sparse_vector'].dtype == 'object' def", "myMinHasher._create_minhash(doc) assert len(res) == n_hashes def test__create_minhash_signatures(): df = load_data()", "x_tokens = set(x.split()) y_tokens = set(y.split()) return len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens))", "= myMinHasher._sparse_vectorize(df, 'name') df = myMinHasher._create_minhash_signatures(df) for col in ['hash_0',", "== 'int64' assert min(res) >= 0 assert min(res) <= myMinHasher.max_token_value", "assert len(res) == n_hashes assert res.dtype == 'int64' assert min(res)", "res.dtype == 'int64' assert min(res) >= 0 assert min(res) <=", "'int64' def test_fit_predict(): df = load_data() myMinHasher = MinHash(10) res", "test_fit_predict_accuracy(): def jaccard(x, y): x_tokens = set(x.split()) y_tokens = set(y.split())", "MinHash(10) res = myMinHasher.fit_predict(df, 'name') assert res.columns.tolist() == ['row_number_1', 'row_number_2',", "col in df.columns assert df[col].dtype == 'int64' def test_fit_predict(): df", "266] res = myMinHasher._create_minhash(doc) assert len(res) == n_hashes def test__create_minhash_signatures():", "from pyminhash import MinHash from pyminhash.datasets import load_data def test__sparse_vector():", "test__create_minhash_signatures(): df = load_data() myMinHasher = MinHash(3) df = myMinHasher._sparse_vectorize(df,", "myMinHasher = MinHash(3) df = myMinHasher._sparse_vectorize(df, 'name') df = myMinHasher._create_minhash_signatures(df)", "= load_data() myMinHasher = MinHash(3) df = myMinHasher._sparse_vectorize(df, 'name') df", 
"MinHash(n_hash_tables=n_hashes) res = myMinHasher._create_hashing_parameters() assert len(res) == n_hashes assert res.dtype", "myMinHasher = MinHash(1000) res = myMinHasher.fit_predict(df, 'name') assert len(res) ==", "test__sparse_vector(): df = load_data() myMinHasher = MinHash(10) res = myMinHasher._sparse_vectorize(df,", "jaccard(row['name_1'], row['name_2']), axis=1) res['diff'] = res['jaccard_real'] - res['jaccard_sim'] assert abs(res['diff'].mean())", "n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes) doc = [59, 65,", "def test_fit_predict_accuracy(): def jaccard(x, y): x_tokens = set(x.split()) y_tokens =", "myMinHasher._create_hashing_parameters() assert len(res) == n_hashes assert res.dtype == 'int64' assert", "len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens)) df = load_data() myMinHasher = MinHash(1000) res", "def jaccard(x, y): x_tokens = set(x.split()) y_tokens = set(y.split()) return", "= load_data() myMinHasher = MinHash(10) res = myMinHasher._sparse_vectorize(df, 'name') assert", "MinHash from pyminhash.datasets import load_data def test__sparse_vector(): df = load_data()", "in df.columns assert df[col].dtype == 'int64' def test_fit_predict(): df =", "myMinHasher = MinHash(n_hash_tables=n_hashes) res = myMinHasher._create_hashing_parameters() assert len(res) == n_hashes", "doc = [59, 65, 66, 67, 118, 150, 266] res", "assert min(res) >= 0 assert min(res) <= myMinHasher.max_token_value def test__create_minhash():", "def test_fit_predict(): df = load_data() myMinHasher = MinHash(10) res =", "in ['hash_0', 'hash_1', 'hash_2']: assert col in df.columns assert df[col].dtype", "= myMinHasher.fit_predict(df, 'name') assert res.columns.tolist() == ['row_number_1', 'row_number_2', 'name_1', 'name_2',", "myMinHasher.fit_predict(df, 'name') assert len(res) == 1727 res['jaccard_real'] = res.apply(lambda row:", "pyminhash import MinHash from pyminhash.datasets import load_data def test__sparse_vector(): df", "df = 
myMinHasher._create_minhash_signatures(df) for col in ['hash_0', 'hash_1', 'hash_2']: assert", "'float' def test_fit_predict_accuracy(): def jaccard(x, y): x_tokens = set(x.split()) y_tokens", "import load_data def test__sparse_vector(): df = load_data() myMinHasher = MinHash(10)", "MinHash(n_hash_tables=n_hashes) doc = [59, 65, 66, 67, 118, 150, 266]", "def test__create_minhash(): n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes) doc =", "= myMinHasher._create_minhash(doc) assert len(res) == n_hashes def test__create_minhash_signatures(): df =", "150, 266] res = myMinHasher._create_minhash(doc) assert len(res) == n_hashes def", "'name_1', 'name_2', 'jaccard_sim'] assert res['jaccard_sim'].dtype == 'float' def test_fit_predict_accuracy(): def", "= set(y.split()) return len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens)) df = load_data() myMinHasher", "len(res) == n_hashes assert res.dtype == 'int64' assert min(res) >=", "= MinHash(10) res = myMinHasher.fit_predict(df, 'name') assert res.columns.tolist() == ['row_number_1',", "axis=1) res['diff'] = res['jaccard_real'] - res['jaccard_sim'] assert abs(res['diff'].mean()) < 0.02", "df.columns assert df[col].dtype == 'int64' def test_fit_predict(): df = load_data()", "n_hashes assert res.dtype == 'int64' assert min(res) >= 0 assert", "assert len(res) == 1727 res['jaccard_real'] = res.apply(lambda row: jaccard(row['name_1'], row['name_2']),", "'sparse_vector'] assert res['sparse_vector'].dtype == 'object' def test__create_hashing_parameters(): n_hashes = 10", "myMinHasher._sparse_vectorize(df, 'name') df = myMinHasher._create_minhash_signatures(df) for col in ['hash_0', 'hash_1',", "set(y.split()) return len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens)) df = load_data() myMinHasher =", "['row_number_1', 'row_number_2', 'name_1', 'name_2', 'jaccard_sim'] assert res['jaccard_sim'].dtype == 'float' def", "load_data def test__sparse_vector(): df = load_data() myMinHasher = 
MinHash(10) res", "load_data() myMinHasher = MinHash(1000) res = myMinHasher.fit_predict(df, 'name') assert len(res)", "'int64' assert min(res) >= 0 assert min(res) <= myMinHasher.max_token_value def", "assert col in df.columns assert df[col].dtype == 'int64' def test_fit_predict():", "= load_data() myMinHasher = MinHash(10) res = myMinHasher.fit_predict(df, 'name') assert", "return len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens)) df = load_data() myMinHasher = MinHash(1000)", "= MinHash(n_hash_tables=n_hashes) res = myMinHasher._create_hashing_parameters() assert len(res) == n_hashes assert", "res = myMinHasher._create_hashing_parameters() assert len(res) == n_hashes assert res.dtype ==", "assert res['jaccard_sim'].dtype == 'float' def test_fit_predict_accuracy(): def jaccard(x, y): x_tokens", "= load_data() myMinHasher = MinHash(1000) res = myMinHasher.fit_predict(df, 'name') assert", "res['jaccard_real'] - res['jaccard_sim'] assert abs(res['diff'].mean()) < 0.02 assert res['diff'].std() <", "= myMinHasher.fit_predict(df, 'name') assert len(res) == 1727 res['jaccard_real'] = res.apply(lambda", "= MinHash(10) res = myMinHasher._sparse_vectorize(df, 'name') assert res.columns.tolist() == ['name',", "= set(x.split()) y_tokens = set(y.split()) return len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens)) df", "row['name_2']), axis=1) res['diff'] = res['jaccard_real'] - res['jaccard_sim'] assert abs(res['diff'].mean()) <", "assert res['sparse_vector'].dtype == 'object' def test__create_hashing_parameters(): n_hashes = 10 myMinHasher", "import MinHash from pyminhash.datasets import load_data def test__sparse_vector(): df =", "load_data() myMinHasher = MinHash(10) res = myMinHasher._sparse_vectorize(df, 'name') assert res.columns.tolist()", "test__create_minhash(): n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes) doc = [59,", "min(res) <= myMinHasher.max_token_value def test__create_minhash(): n_hashes = 10 myMinHasher =", 
"def test__sparse_vector(): df = load_data() myMinHasher = MinHash(10) res =", "myMinHasher.max_token_value def test__create_minhash(): n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes) doc", "'name') df = myMinHasher._create_minhash_signatures(df) for col in ['hash_0', 'hash_1', 'hash_2']:", "'hash_1', 'hash_2']: assert col in df.columns assert df[col].dtype == 'int64'", "== ['row_number_1', 'row_number_2', 'name_1', 'name_2', 'jaccard_sim'] assert res['jaccard_sim'].dtype == 'float'", "== ['name', 'sparse_vector'] assert res['sparse_vector'].dtype == 'object' def test__create_hashing_parameters(): n_hashes", "jaccard(x, y): x_tokens = set(x.split()) y_tokens = set(y.split()) return len(x_tokens.intersection(y_tokens))", "66, 67, 118, 150, 266] res = myMinHasher._create_minhash(doc) assert len(res)", "for col in ['hash_0', 'hash_1', 'hash_2']: assert col in df.columns", "assert df[col].dtype == 'int64' def test_fit_predict(): df = load_data() myMinHasher", "res.apply(lambda row: jaccard(row['name_1'], row['name_2']), axis=1) res['diff'] = res['jaccard_real'] - res['jaccard_sim']", "= 10 myMinHasher = MinHash(n_hash_tables=n_hashes) res = myMinHasher._create_hashing_parameters() assert len(res)", "67, 118, 150, 266] res = myMinHasher._create_minhash(doc) assert len(res) ==", "y): x_tokens = set(x.split()) y_tokens = set(y.split()) return len(x_tokens.intersection(y_tokens)) /", "pytest from pyminhash import MinHash from pyminhash.datasets import load_data def", "set(x.split()) y_tokens = set(y.split()) return len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens)) df =", "= res.apply(lambda row: jaccard(row['name_1'], row['name_2']), axis=1) res['diff'] = res['jaccard_real'] -", "res.columns.tolist() == ['row_number_1', 'row_number_2', 'name_1', 'name_2', 'jaccard_sim'] assert res['jaccard_sim'].dtype ==", "df = load_data() myMinHasher = MinHash(10) res = myMinHasher._sparse_vectorize(df, 'name')", "'name') assert res.columns.tolist() == 
['name', 'sparse_vector'] assert res['sparse_vector'].dtype == 'object'", "res['jaccard_sim'].dtype == 'float' def test_fit_predict_accuracy(): def jaccard(x, y): x_tokens =", "<= myMinHasher.max_token_value def test__create_minhash(): n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes)", "= MinHash(n_hash_tables=n_hashes) doc = [59, 65, 66, 67, 118, 150,", "= [59, 65, 66, 67, 118, 150, 266] res =", "myMinHasher._create_minhash_signatures(df) for col in ['hash_0', 'hash_1', 'hash_2']: assert col in", "== 'object' def test__create_hashing_parameters(): n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes)", "- res['jaccard_sim'] assert abs(res['diff'].mean()) < 0.02 assert res['diff'].std() < 0.1", "myMinHasher.fit_predict(df, 'name') assert res.columns.tolist() == ['row_number_1', 'row_number_2', 'name_1', 'name_2', 'jaccard_sim']", "df = myMinHasher._sparse_vectorize(df, 'name') df = myMinHasher._create_minhash_signatures(df) for col in", "res['sparse_vector'].dtype == 'object' def test__create_hashing_parameters(): n_hashes = 10 myMinHasher =", "MinHash(10) res = myMinHasher._sparse_vectorize(df, 'name') assert res.columns.tolist() == ['name', 'sparse_vector']", "10 myMinHasher = MinHash(n_hash_tables=n_hashes) res = myMinHasher._create_hashing_parameters() assert len(res) ==", "= myMinHasher._create_hashing_parameters() assert len(res) == n_hashes assert res.dtype == 'int64'", "= 10 myMinHasher = MinHash(n_hash_tables=n_hashes) doc = [59, 65, 66,", "= myMinHasher._sparse_vectorize(df, 'name') assert res.columns.tolist() == ['name', 'sparse_vector'] assert res['sparse_vector'].dtype", "= MinHash(3) df = myMinHasher._sparse_vectorize(df, 'name') df = myMinHasher._create_minhash_signatures(df) for", "0 assert min(res) <= myMinHasher.max_token_value def test__create_minhash(): n_hashes = 10", "== 1727 res['jaccard_real'] = res.apply(lambda row: jaccard(row['name_1'], row['name_2']), axis=1) res['diff']", "row: jaccard(row['name_1'], row['name_2']), 
axis=1) res['diff'] = res['jaccard_real'] - res['jaccard_sim'] assert", "['hash_0', 'hash_1', 'hash_2']: assert col in df.columns assert df[col].dtype ==", "'hash_2']: assert col in df.columns assert df[col].dtype == 'int64' def", "import pytest from pyminhash import MinHash from pyminhash.datasets import load_data", "== 'int64' def test_fit_predict(): df = load_data() myMinHasher = MinHash(10)", "y_tokens = set(y.split()) return len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens)) df = load_data()", "res = myMinHasher._sparse_vectorize(df, 'name') assert res.columns.tolist() == ['name', 'sparse_vector'] assert", "1727 res['jaccard_real'] = res.apply(lambda row: jaccard(row['name_1'], row['name_2']), axis=1) res['diff'] =", "myMinHasher = MinHash(10) res = myMinHasher.fit_predict(df, 'name') assert res.columns.tolist() ==", "res['jaccard_real'] = res.apply(lambda row: jaccard(row['name_1'], row['name_2']), axis=1) res['diff'] = res['jaccard_real']", "== n_hashes def test__create_minhash_signatures(): df = load_data() myMinHasher = MinHash(3)", "'jaccard_sim'] assert res['jaccard_sim'].dtype == 'float' def test_fit_predict_accuracy(): def jaccard(x, y):", "df[col].dtype == 'int64' def test_fit_predict(): df = load_data() myMinHasher =", "== 'float' def test_fit_predict_accuracy(): def jaccard(x, y): x_tokens = set(x.split())", "= res['jaccard_real'] - res['jaccard_sim'] assert abs(res['diff'].mean()) < 0.02 assert res['diff'].std()", "def test__create_minhash_signatures(): df = load_data() myMinHasher = MinHash(3) df =", "'name_2', 'jaccard_sim'] assert res['jaccard_sim'].dtype == 'float' def test_fit_predict_accuracy(): def jaccard(x,", "'object' def test__create_hashing_parameters(): n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes) res", "res = myMinHasher._create_minhash(doc) assert len(res) == n_hashes def test__create_minhash_signatures(): df", "def test__create_hashing_parameters(): n_hashes = 10 myMinHasher = 
MinHash(n_hash_tables=n_hashes) res =", "len(res) == 1727 res['jaccard_real'] = res.apply(lambda row: jaccard(row['name_1'], row['name_2']), axis=1)", "col in ['hash_0', 'hash_1', 'hash_2']: assert col in df.columns assert", "len(x_tokens.union(y_tokens)) df = load_data() myMinHasher = MinHash(1000) res = myMinHasher.fit_predict(df,", "n_hashes def test__create_minhash_signatures(): df = load_data() myMinHasher = MinHash(3) df", "118, 150, 266] res = myMinHasher._create_minhash(doc) assert len(res) == n_hashes", "10 myMinHasher = MinHash(n_hash_tables=n_hashes) doc = [59, 65, 66, 67,", "'row_number_2', 'name_1', 'name_2', 'jaccard_sim'] assert res['jaccard_sim'].dtype == 'float' def test_fit_predict_accuracy():", "df = load_data() myMinHasher = MinHash(3) df = myMinHasher._sparse_vectorize(df, 'name')", "test_fit_predict(): df = load_data() myMinHasher = MinHash(10) res = myMinHasher.fit_predict(df,", "pyminhash.datasets import load_data def test__sparse_vector(): df = load_data() myMinHasher =", "[59, 65, 66, 67, 118, 150, 266] res = myMinHasher._create_minhash(doc)", "res['diff'] = res['jaccard_real'] - res['jaccard_sim'] assert abs(res['diff'].mean()) < 0.02 assert", "res.columns.tolist() == ['name', 'sparse_vector'] assert res['sparse_vector'].dtype == 'object' def test__create_hashing_parameters():", ">= 0 assert min(res) <= myMinHasher.max_token_value def test__create_minhash(): n_hashes =", "n_hashes = 10 myMinHasher = MinHash(n_hash_tables=n_hashes) res = myMinHasher._create_hashing_parameters() assert", "/ len(x_tokens.union(y_tokens)) df = load_data() myMinHasher = MinHash(1000) res =", "res = myMinHasher.fit_predict(df, 'name') assert len(res) == 1727 res['jaccard_real'] =", "= myMinHasher._create_minhash_signatures(df) for col in ['hash_0', 'hash_1', 'hash_2']: assert col", "myMinHasher = MinHash(10) res = myMinHasher._sparse_vectorize(df, 'name') assert res.columns.tolist() ==" ]
[ "CHANNEL_ARGUMENT_PREFIX: final = '--channels=' LAST_CHECK_ARGUMENT_PREFIX: final = '--last-check=' TWO_WEEKS_IN_DAYS: final", "from tools import localize_time RSS_URL_PREFIX: final = 'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX: final", "14 DEFAULT_LAST_CHECK: final = localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS)) EMPTY: final =", "RSS_URL_PREFIX: final = 'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX: final = '--location=' CHANNEL_ARGUMENT_PREFIX: final", "= '--channels=' LAST_CHECK_ARGUMENT_PREFIX: final = '--last-check=' TWO_WEEKS_IN_DAYS: final = 14", "DEFAULT_LAST_CHECK: final = localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS)) EMPTY: final = ''", "localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS)) EMPTY: final = '' CHANNEL_POSTS_LIMIT: final =", "from datetime import datetime, timedelta from typing import final from", "= '--last-check=' TWO_WEEKS_IN_DAYS: final = 14 DEFAULT_LAST_CHECK: final = localize_time(datetime.now()", "import final from tools import localize_time RSS_URL_PREFIX: final = 'https://www.youtube.com/feeds/videos.xml?channel_id={0}'", "- timedelta(days=TWO_WEEKS_IN_DAYS)) EMPTY: final = '' CHANNEL_POSTS_LIMIT: final = 20", "tools import localize_time RSS_URL_PREFIX: final = 'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX: final =", "timedelta from typing import final from tools import localize_time RSS_URL_PREFIX:", "datetime, timedelta from typing import final from tools import localize_time", "localize_time RSS_URL_PREFIX: final = 'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX: final = '--location=' CHANNEL_ARGUMENT_PREFIX:", "final from tools import localize_time RSS_URL_PREFIX: final = 'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX:", "import localize_time RSS_URL_PREFIX: final = 
'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX: final = '--location='", "= '--location=' CHANNEL_ARGUMENT_PREFIX: final = '--channels=' LAST_CHECK_ARGUMENT_PREFIX: final = '--last-check='", "import datetime, timedelta from typing import final from tools import", "'--location=' CHANNEL_ARGUMENT_PREFIX: final = '--channels=' LAST_CHECK_ARGUMENT_PREFIX: final = '--last-check=' TWO_WEEKS_IN_DAYS:", "'--last-check=' TWO_WEEKS_IN_DAYS: final = 14 DEFAULT_LAST_CHECK: final = localize_time(datetime.now() -", "'--channels=' LAST_CHECK_ARGUMENT_PREFIX: final = '--last-check=' TWO_WEEKS_IN_DAYS: final = 14 DEFAULT_LAST_CHECK:", "final = '--last-check=' TWO_WEEKS_IN_DAYS: final = 14 DEFAULT_LAST_CHECK: final =", "= 14 DEFAULT_LAST_CHECK: final = localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS)) EMPTY: final", "LOCATION_ARGUMENT_PREFIX: final = '--location=' CHANNEL_ARGUMENT_PREFIX: final = '--channels=' LAST_CHECK_ARGUMENT_PREFIX: final", "final = '--location=' CHANNEL_ARGUMENT_PREFIX: final = '--channels=' LAST_CHECK_ARGUMENT_PREFIX: final =", "final = 14 DEFAULT_LAST_CHECK: final = localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS)) EMPTY:", "= localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS)) EMPTY: final = '' CHANNEL_POSTS_LIMIT: final", "final = '--channels=' LAST_CHECK_ARGUMENT_PREFIX: final = '--last-check=' TWO_WEEKS_IN_DAYS: final =", "= 'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX: final = '--location=' CHANNEL_ARGUMENT_PREFIX: final = '--channels='", "TWO_WEEKS_IN_DAYS: final = 14 DEFAULT_LAST_CHECK: final = localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS))", "LAST_CHECK_ARGUMENT_PREFIX: final = '--last-check=' TWO_WEEKS_IN_DAYS: final = 14 DEFAULT_LAST_CHECK: final", "final = 'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX: final = '--location=' CHANNEL_ARGUMENT_PREFIX: final =", "typing import 
final from tools import localize_time RSS_URL_PREFIX: final =", "datetime import datetime, timedelta from typing import final from tools", "'https://www.youtube.com/feeds/videos.xml?channel_id={0}' LOCATION_ARGUMENT_PREFIX: final = '--location=' CHANNEL_ARGUMENT_PREFIX: final = '--channels=' LAST_CHECK_ARGUMENT_PREFIX:", "final = localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS)) EMPTY: final = '' CHANNEL_POSTS_LIMIT:", "from typing import final from tools import localize_time RSS_URL_PREFIX: final" ]
[ "= Module() reset = Signal() busy_n = Signal(reset = 1)", "dataDir = pBus.data.oe read = pBus.read write = pBus.write with", "m.If(qspiFlash.complete | reset): m.d.sync += busy_n.eq(1) with m.Elif(pic.iBus.read): m.d.sync +=", "= QSPIBus(resourceName = ('spi_flash_4x', 0)) m.submodules.pic = pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16()))", "from .soc.busses.qspi import QSPIBus m = Module() reset = Signal()", "run.o.eq(qspiFlash.ready & busy_n), qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address), read.eq(pic.pBus.read), pic.pBus.readData.eq(dataIn),", "import QSPIBus m = Module() reset = Signal() busy_n =", "= ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run = platform.request('run', 0) pBus = platform.request('p_bus', 0)", "busy_n.eq(1) with m.Elif(pic.iBus.read): m.d.sync += busy_n.eq(0) m.d.comb += [ reset.eq(~qspiFlash.ready),", "busy_n), qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address), read.eq(pic.pBus.read), pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData),", "# SPDX-License-Identifier: BSD-3-Clause from amaranth import Elaboratable, Module, Signal, ResetInserter,", "0)) m.submodules.pic = pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run = platform.request('run', 0)", "from amaranth import Elaboratable, Module, Signal, ResetInserter, EnableInserter __all__ =", "import PIC16 from .soc.busses.qspi import QSPIBus m = Module() reset", "PIC16 from .soc.busses.qspi import QSPIBus m = Module() reset =", "addr.eq(pic.pBus.address), read.eq(pic.pBus.read), pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData), dataDir.eq(pic.pBus.write), ] return m def", "m.d.sync += busy_n.eq(1) with m.Elif(pic.iBus.read): m.d.sync += busy_n.eq(0) 
m.d.comb +=", "busy_n = Signal(reset = 1) m.submodules.qspiFlash = qspiFlash = QSPIBus(resourceName", "1) m.submodules.qspiFlash = qspiFlash = QSPIBus(resourceName = ('spi_flash_4x', 0)) m.submodules.pic", ".soc.busses.qspi import QSPIBus m = Module() reset = Signal() busy_n", "& busy_n), qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address), read.eq(pic.pBus.read), pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write),", "( 'PIC16Caravel', ) class PIC16Caravel(Elaboratable): def elaborate(self, platform): from .pic16", "SPDX-License-Identifier: BSD-3-Clause from amaranth import Elaboratable, Module, Signal, ResetInserter, EnableInserter", "0) addr = pBus.addr.o dataIn = pBus.data.i dataOut = pBus.data.o", "= pBus.data.o dataDir = pBus.data.oe read = pBus.read write =", "platform): from .pic16 import PIC16 from .soc.busses.qspi import QSPIBus m", "PIC16Caravel(Elaboratable): def elaborate(self, platform): from .pic16 import PIC16 from .soc.busses.qspi", "def elaborate(self, platform): from .pic16 import PIC16 from .soc.busses.qspi import", "from .pic16 import PIC16 from .soc.busses.qspi import QSPIBus m =", "Module() reset = Signal() busy_n = Signal(reset = 1) m.submodules.qspiFlash", "m.submodules.qspiFlash = qspiFlash = QSPIBus(resourceName = ('spi_flash_4x', 0)) m.submodules.pic =", "BSD-3-Clause from amaranth import Elaboratable, Module, Signal, ResetInserter, EnableInserter __all__", "= pBus.data.i dataOut = pBus.data.o dataDir = pBus.data.oe read =", "dataIn = pBus.data.i dataOut = pBus.data.o dataDir = pBus.data.oe read", "= platform.request('run', 0) pBus = platform.request('p_bus', 0) addr = pBus.addr.o", "Module, Signal, ResetInserter, EnableInserter __all__ = ( 'PIC16Caravel', ) class", "pBus.data.o dataDir = pBus.data.oe read = pBus.read write = pBus.write", "= pBus.read write = pBus.write with m.If(qspiFlash.complete | reset): m.d.sync", 
"reset = Signal() busy_n = Signal(reset = 1) m.submodules.qspiFlash =", "m.d.sync += busy_n.eq(0) m.d.comb += [ reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready & busy_n),", "reset): m.d.sync += busy_n.eq(1) with m.Elif(pic.iBus.read): m.d.sync += busy_n.eq(0) m.d.comb", ".pic16 import PIC16 from .soc.busses.qspi import QSPIBus m = Module()", "= Signal() busy_n = Signal(reset = 1) m.submodules.qspiFlash = qspiFlash", "+= busy_n.eq(0) m.d.comb += [ reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready & busy_n), qspiFlash.address[0].eq(0),", "m = Module() reset = Signal() busy_n = Signal(reset =", "import Elaboratable, Module, Signal, ResetInserter, EnableInserter __all__ = ( 'PIC16Caravel',", "('spi_flash_4x', 0)) m.submodules.pic = pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run = platform.request('run',", "m.submodules.pic = pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run = platform.request('run', 0) pBus", "'PIC16Caravel', ) class PIC16Caravel(Elaboratable): def elaborate(self, platform): from .pic16 import", "pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run = platform.request('run', 0) pBus = platform.request('p_bus',", "pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address), read.eq(pic.pBus.read), pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData), dataDir.eq(pic.pBus.write), ] return", "= platform.request('p_bus', 0) addr = pBus.addr.o dataIn = pBus.data.i dataOut", "qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address), read.eq(pic.pBus.read), pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData), dataDir.eq(pic.pBus.write),", "QSPIBus m = Module() reset = Signal() busy_n = Signal(reset", "pBus.data.oe read = pBus.read write = pBus.write with m.If(qspiFlash.complete |", "busy_n.eq(0) m.d.comb 
+= [ reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready & busy_n), qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address),", "read.eq(pic.pBus.read), pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData), dataDir.eq(pic.pBus.write), ] return m def get_ports(self):", "Elaboratable, Module, Signal, ResetInserter, EnableInserter __all__ = ( 'PIC16Caravel', )", "pBus = platform.request('p_bus', 0) addr = pBus.addr.o dataIn = pBus.data.i", "ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run = platform.request('run', 0) pBus = platform.request('p_bus', 0) addr", "Signal() busy_n = Signal(reset = 1) m.submodules.qspiFlash = qspiFlash =", "= ( 'PIC16Caravel', ) class PIC16Caravel(Elaboratable): def elaborate(self, platform): from", "= pBus.addr.o dataIn = pBus.data.i dataOut = pBus.data.o dataDir =", "= qspiFlash = QSPIBus(resourceName = ('spi_flash_4x', 0)) m.submodules.pic = pic", "with m.If(qspiFlash.complete | reset): m.d.sync += busy_n.eq(1) with m.Elif(pic.iBus.read): m.d.sync", "read = pBus.read write = pBus.write with m.If(qspiFlash.complete | reset):", "write = pBus.write with m.If(qspiFlash.complete | reset): m.d.sync += busy_n.eq(1)", "pBus.read write = pBus.write with m.If(qspiFlash.complete | reset): m.d.sync +=", "= 1) m.submodules.qspiFlash = qspiFlash = QSPIBus(resourceName = ('spi_flash_4x', 0))", "0) pBus = platform.request('p_bus', 0) addr = pBus.addr.o dataIn =", "+= busy_n.eq(1) with m.Elif(pic.iBus.read): m.d.sync += busy_n.eq(0) m.d.comb += [", "__all__ = ( 'PIC16Caravel', ) class PIC16Caravel(Elaboratable): def elaborate(self, platform):", "= pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run = platform.request('run', 0) pBus =", "reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready & busy_n), qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address), 
read.eq(pic.pBus.read),", "run = platform.request('run', 0) pBus = platform.request('p_bus', 0) addr =", "= Signal(reset = 1) m.submodules.qspiFlash = qspiFlash = QSPIBus(resourceName =", "Signal, ResetInserter, EnableInserter __all__ = ( 'PIC16Caravel', ) class PIC16Caravel(Elaboratable):", "pBus.addr.o dataIn = pBus.data.i dataOut = pBus.data.o dataDir = pBus.data.oe", "qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address), read.eq(pic.pBus.read), pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData), dataDir.eq(pic.pBus.write), ]", "| reset): m.d.sync += busy_n.eq(1) with m.Elif(pic.iBus.read): m.d.sync += busy_n.eq(0)", "amaranth import Elaboratable, Module, Signal, ResetInserter, EnableInserter __all__ = (", ") class PIC16Caravel(Elaboratable): def elaborate(self, platform): from .pic16 import PIC16", "QSPIBus(resourceName = ('spi_flash_4x', 0)) m.submodules.pic = pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run", "qspiFlash = QSPIBus(resourceName = ('spi_flash_4x', 0)) m.submodules.pic = pic =", "class PIC16Caravel(Elaboratable): def elaborate(self, platform): from .pic16 import PIC16 from", "EnableInserter __all__ = ( 'PIC16Caravel', ) class PIC16Caravel(Elaboratable): def elaborate(self,", "[ reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready & busy_n), qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address),", "write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData), dataDir.eq(pic.pBus.write), ] return m def get_ports(self): return []", "platform.request('run', 0) pBus = platform.request('p_bus', 0) addr = pBus.addr.o dataIn", "ResetInserter, EnableInserter __all__ = ( 'PIC16Caravel', ) class PIC16Caravel(Elaboratable): def", "qspiFlash.read.eq(pic.iBus.read), addr.eq(pic.pBus.address), read.eq(pic.pBus.read), 
pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData), dataDir.eq(pic.pBus.write), ] return m", "pBus.data.i dataOut = pBus.data.o dataDir = pBus.data.oe read = pBus.read", "m.d.comb += [ reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready & busy_n), qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data),", "with m.Elif(pic.iBus.read): m.d.sync += busy_n.eq(0) m.d.comb += [ reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready", "platform.request('p_bus', 0) addr = pBus.addr.o dataIn = pBus.data.i dataOut =", "+= [ reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready & busy_n), qspiFlash.address[0].eq(0), qspiFlash.address[1:].eq(pic.iBus.address), pic.iBus.data.eq(qspiFlash.data), qspiFlash.read.eq(pic.iBus.read),", "addr = pBus.addr.o dataIn = pBus.data.i dataOut = pBus.data.o dataDir", "= ('spi_flash_4x', 0)) m.submodules.pic = pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16())) run =", "dataOut = pBus.data.o dataDir = pBus.data.oe read = pBus.read write", "elaborate(self, platform): from .pic16 import PIC16 from .soc.busses.qspi import QSPIBus", "= pBus.write with m.If(qspiFlash.complete | reset): m.d.sync += busy_n.eq(1) with", "pic.pBus.readData.eq(dataIn), write.eq(pic.pBus.write), dataOut.eq(pic.pBus.writeData), dataDir.eq(pic.pBus.write), ] return m def get_ports(self): return", "= pBus.data.oe read = pBus.read write = pBus.write with m.If(qspiFlash.complete", "pBus.write with m.If(qspiFlash.complete | reset): m.d.sync += busy_n.eq(1) with m.Elif(pic.iBus.read):", "m.Elif(pic.iBus.read): m.d.sync += busy_n.eq(0) m.d.comb += [ reset.eq(~qspiFlash.ready), run.o.eq(qspiFlash.ready &", "Signal(reset = 1) m.submodules.qspiFlash = qspiFlash = QSPIBus(resourceName = ('spi_flash_4x'," ]
[ "\"level\": level_count += 1 if role.name == \"country\": country_count +=", "0 valor_count = 0 ign_count = 0 tc_count = 0", "= 0 level_count = 0 country_count = 0 profile_count =", "== \"instinct\": instinct_count += 1 if role.name == \"mystic\": mystic_count", "import discord class Stats(commands.Cog): def __init__(self, bot): self.bot = bot", "Nexus Profiles: {profile_count}'] embed = discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url) embed.add_field(name='Server Stats:',", "embed = discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url) embed.add_field(name='Server Stats:', value='\\n'.join(values), inline=False) await", "completed Nexus Profiles: {profile_count}'] embed = discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url) embed.add_field(name='Server", "1 if role.name == \"country\": country_count += 1 if role.name", "\"Member\": member_role_count += 1 if role.name == \"instinct\": instinct_count +=", "member in members: if not member.bot: member_count += 1 for", "role.name == \"ign\": ign_count += 1 if role.name == \"tc\":", "Team Instinct: {instinct_count}', f'Members on Team Mystic: {mystic_count}', f'Members on", "== \"mystic\": mystic_count += 1 if role.name == \"valor\": valor_count", "__init__(self, bot): self.bot = bot @commands.command() @commands.has_permissions(manage_channels=True) async def stats(self,", "f'Members with TC set: {tc_count}', f'Members with level set: {level_count}',", "commands import discord class Stats(commands.Cog): def __init__(self, bot): self.bot =", "self.bot = bot @commands.command() @commands.has_permissions(manage_channels=True) async def stats(self, ctx): members", "{member_role_count}', f'Members on Team Instinct: {instinct_count}', f'Members on Team Mystic:", "embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url) 
embed.add_field(name='Server Stats:', value='\\n'.join(values), inline=False) await ctx.send(embed=embed) @stats.error async", "0 for member in members: if not member.bot: member_count +=", "if role.name == \"tc\": tc_count += 1 if role.name ==", "\"mystic\": mystic_count += 1 if role.name == \"valor\": valor_count +=", "icon_url=ctx.guild.icon_url) embed.add_field(name='Server Stats:', value='\\n'.join(values), inline=False) await ctx.send(embed=embed) @stats.error async def", "instinct_count += 1 if role.name == \"mystic\": mystic_count += 1", "from discord.ext import commands import discord class Stats(commands.Cog): def __init__(self,", "mystic_count = 0 valor_count = 0 ign_count = 0 tc_count", "if role.name == \"valor\": valor_count += 1 if role.name ==", "+= 1 if role.name == \"ign\": ign_count += 1 if", "= 0 member_role_count = 0 instinct_count = 0 mystic_count =", "discord.ext import commands import discord class Stats(commands.Cog): def __init__(self, bot):", "await ctx.guild.fetch_members(limit=None).flatten() member_count = 0 member_role_count = 0 instinct_count =", "{member_count}', f'Members Role: {member_role_count}', f'Members on Team Instinct: {instinct_count}', f'Members", "{level_count}', f'Members with country set: {country_count}', f'Members with completed Nexus", "ctx, error): if isinstance(error, commands.MissingPermissions): await ctx.send(\"Sorry, you can't run", "\"tc\": tc_count += 1 if role.name == \"level\": level_count +=", "1 if role.name == \"ign\": ign_count += 1 if role.name", "Instinct: {instinct_count}', f'Members on Team Mystic: {mystic_count}', f'Members on Team", "ctx.send(\"Sorry, you can't run this command\") else: raise error def", "members = await ctx.guild.fetch_members(limit=None).flatten() member_count = 0 member_role_count = 0", "+= 1 if role.name == \"mystic\": mystic_count += 1 if", "if role.name == \"mystic\": mystic_count += 1 if role.name ==", "with level set: {level_count}', f'Members with country set: 
{country_count}', f'Members", "== \"Member\": member_role_count += 1 if role.name == \"instinct\": instinct_count", "{tc_count}', f'Members with level set: {level_count}', f'Members with country set:", "1 if role.name == \"tc\": tc_count += 1 if role.name", "ign_count = 0 tc_count = 0 level_count = 0 country_count", "level_count += 1 if role.name == \"country\": country_count += 1", "error): if isinstance(error, commands.MissingPermissions): await ctx.send(\"Sorry, you can't run this", "Team Valor: {valor_count}', f'Members with IGN set: {ign_count}', f'Members with", "f'Members with IGN set: {ign_count}', f'Members with TC set: {tc_count}',", "commands.MissingPermissions): await ctx.send(\"Sorry, you can't run this command\") else: raise", "def __init__(self, bot): self.bot = bot @commands.command() @commands.has_permissions(manage_channels=True) async def", "@commands.command() @commands.has_permissions(manage_channels=True) async def stats(self, ctx): members = await ctx.guild.fetch_members(limit=None).flatten()", "bot): self.bot = bot @commands.command() @commands.has_permissions(manage_channels=True) async def stats(self, ctx):", "= 0 tc_count = 0 level_count = 0 country_count =", "not member.bot: member_count += 1 for role in member.roles: if", "\"country\": country_count += 1 if role.name == \"profile\": profile_count +=", "if role.name == \"level\": level_count += 1 if role.name ==", "if role.name == \"ign\": ign_count += 1 if role.name ==", "1 if role.name == \"level\": level_count += 1 if role.name", "ign_count += 1 if role.name == \"tc\": tc_count += 1", "in member.roles: if role.name == \"Member\": member_role_count += 1 if", "role in member.roles: if role.name == \"Member\": member_role_count += 1", "Stats:', value='\\n'.join(values), inline=False) await ctx.send(embed=embed) @stats.error async def permission_error(self, ctx,", "== \"profile\": profile_count += 1 values = [f'Members: {member_count}', f'Members", "[f'Members: {member_count}', f'Members 
Role: {member_role_count}', f'Members on Team Instinct: {instinct_count}',", "= discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url) embed.add_field(name='Server Stats:', value='\\n'.join(values), inline=False) await ctx.send(embed=embed)", "if isinstance(error, commands.MissingPermissions): await ctx.send(\"Sorry, you can't run this command\")", "+= 1 if role.name == \"level\": level_count += 1 if", "role.name == \"level\": level_count += 1 if role.name == \"country\":", "Role: {member_role_count}', f'Members on Team Instinct: {instinct_count}', f'Members on Team", "f'Members with level set: {level_count}', f'Members with country set: {country_count}',", "member_role_count += 1 if role.name == \"instinct\": instinct_count += 1", "1 if role.name == \"profile\": profile_count += 1 values =", "if role.name == \"instinct\": instinct_count += 1 if role.name ==", "member.roles: if role.name == \"Member\": member_role_count += 1 if role.name", "member.bot: member_count += 1 for role in member.roles: if role.name", "f'Members with completed Nexus Profiles: {profile_count}'] embed = discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name,", "member_count = 0 member_role_count = 0 instinct_count = 0 mystic_count", "with completed Nexus Profiles: {profile_count}'] embed = discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url)", "ctx.send(embed=embed) @stats.error async def permission_error(self, ctx, error): if isinstance(error, commands.MissingPermissions):", "tc_count += 1 if role.name == \"level\": level_count += 1", "if role.name == \"Member\": member_role_count += 1 if role.name ==", "discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url) embed.add_field(name='Server Stats:', value='\\n'.join(values), inline=False) await ctx.send(embed=embed) @stats.error", "role.name == \"Member\": 
member_role_count += 1 if role.name == \"instinct\":", "{mystic_count}', f'Members on Team Valor: {valor_count}', f'Members with IGN set:", "with country set: {country_count}', f'Members with completed Nexus Profiles: {profile_count}']", "country_count += 1 if role.name == \"profile\": profile_count += 1", "class Stats(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command() @commands.has_permissions(manage_channels=True)", "set: {country_count}', f'Members with completed Nexus Profiles: {profile_count}'] embed =", "{country_count}', f'Members with completed Nexus Profiles: {profile_count}'] embed = discord.Embed(color=discord.Color.green())", "<reponame>est73/raid-shack<gh_stars>0 from discord.ext import commands import discord class Stats(commands.Cog): def", "async def permission_error(self, ctx, error): if isinstance(error, commands.MissingPermissions): await ctx.send(\"Sorry,", "value='\\n'.join(values), inline=False) await ctx.send(embed=embed) @stats.error async def permission_error(self, ctx, error):", "set: {tc_count}', f'Members with level set: {level_count}', f'Members with country", "instinct_count = 0 mystic_count = 0 valor_count = 0 ign_count", "role.name == \"valor\": valor_count += 1 if role.name == \"ign\":", "if role.name == \"country\": country_count += 1 if role.name ==", "= 0 ign_count = 0 tc_count = 0 level_count =", "== \"valor\": valor_count += 1 if role.name == \"ign\": ign_count", "1 if role.name == \"mystic\": mystic_count += 1 if role.name", "f'Members with country set: {country_count}', f'Members with completed Nexus Profiles:", "can't run this command\") else: raise error def setup(bot): bot.add_cog(Stats(bot))", "Team Mystic: {mystic_count}', f'Members on Team Valor: {valor_count}', f'Members with", "== \"tc\": tc_count += 1 if role.name == \"level\": level_count", "isinstance(error, commands.MissingPermissions): await ctx.send(\"Sorry, you can't run this command\") else:", "0 ign_count = 0 tc_count = 0 level_count = 
0", "0 mystic_count = 0 valor_count = 0 ign_count = 0", "= 0 country_count = 0 profile_count = 0 for member", "+= 1 for role in member.roles: if role.name == \"Member\":", "country_count = 0 profile_count = 0 for member in members:", "level set: {level_count}', f'Members with country set: {country_count}', f'Members with", "1 if role.name == \"instinct\": instinct_count += 1 if role.name", "profile_count = 0 for member in members: if not member.bot:", "f'Members on Team Instinct: {instinct_count}', f'Members on Team Mystic: {mystic_count}',", "on Team Instinct: {instinct_count}', f'Members on Team Mystic: {mystic_count}', f'Members", "ctx.guild.fetch_members(limit=None).flatten() member_count = 0 member_role_count = 0 instinct_count = 0", "= await ctx.guild.fetch_members(limit=None).flatten() member_count = 0 member_role_count = 0 instinct_count", "{ign_count}', f'Members with TC set: {tc_count}', f'Members with level set:", "valor_count += 1 if role.name == \"ign\": ign_count += 1", "member_count += 1 for role in member.roles: if role.name ==", "role.name == \"instinct\": instinct_count += 1 if role.name == \"mystic\":", "values = [f'Members: {member_count}', f'Members Role: {member_role_count}', f'Members on Team", "= 0 profile_count = 0 for member in members: if", "f'Members on Team Valor: {valor_count}', f'Members with IGN set: {ign_count}',", "IGN set: {ign_count}', f'Members with TC set: {tc_count}', f'Members with", "= bot @commands.command() @commands.has_permissions(manage_channels=True) async def stats(self, ctx): members =", "set: {ign_count}', f'Members with TC set: {tc_count}', f'Members with level", "bot @commands.command() @commands.has_permissions(manage_channels=True) async def stats(self, ctx): members = await", "\"profile\": profile_count += 1 values = [f'Members: {member_count}', f'Members Role:", "def stats(self, ctx): members = await ctx.guild.fetch_members(limit=None).flatten() member_count = 0", "mystic_count += 1 if role.name == \"valor\": 
valor_count += 1", "{instinct_count}', f'Members on Team Mystic: {mystic_count}', f'Members on Team Valor:", "= 0 for member in members: if not member.bot: member_count", "f'Members on Team Mystic: {mystic_count}', f'Members on Team Valor: {valor_count}',", "Mystic: {mystic_count}', f'Members on Team Valor: {valor_count}', f'Members with IGN", "+= 1 values = [f'Members: {member_count}', f'Members Role: {member_role_count}', f'Members", "on Team Mystic: {mystic_count}', f'Members on Team Valor: {valor_count}', f'Members", "Profiles: {profile_count}'] embed = discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url) embed.add_field(name='Server Stats:', value='\\n'.join(values),", "role.name == \"mystic\": mystic_count += 1 if role.name == \"valor\":", "ctx): members = await ctx.guild.fetch_members(limit=None).flatten() member_count = 0 member_role_count =", "member_role_count = 0 instinct_count = 0 mystic_count = 0 valor_count", "await ctx.send(\"Sorry, you can't run this command\") else: raise error", "TC set: {tc_count}', f'Members with level set: {level_count}', f'Members with", "level_count = 0 country_count = 0 profile_count = 0 for", "if role.name == \"profile\": profile_count += 1 values = [f'Members:", "async def stats(self, ctx): members = await ctx.guild.fetch_members(limit=None).flatten() member_count =", "import commands import discord class Stats(commands.Cog): def __init__(self, bot): self.bot", "0 instinct_count = 0 mystic_count = 0 valor_count = 0", "role.name == \"profile\": profile_count += 1 values = [f'Members: {member_count}',", "Stats(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command() @commands.has_permissions(manage_channels=True) async", "profile_count += 1 values = [f'Members: {member_count}', f'Members Role: {member_role_count}',", "0 level_count = 0 country_count = 0 profile_count = 0", "+= 1 if role.name == \"country\": country_count += 1 if", "await 
ctx.send(embed=embed) @stats.error async def permission_error(self, ctx, error): if isinstance(error,", "role.name == \"country\": country_count += 1 if role.name == \"profile\":", "inline=False) await ctx.send(embed=embed) @stats.error async def permission_error(self, ctx, error): if", "if not member.bot: member_count += 1 for role in member.roles:", "+= 1 if role.name == \"tc\": tc_count += 1 if", "1 values = [f'Members: {member_count}', f'Members Role: {member_role_count}', f'Members on", "for member in members: if not member.bot: member_count += 1", "= 0 valor_count = 0 ign_count = 0 tc_count =", "country set: {country_count}', f'Members with completed Nexus Profiles: {profile_count}'] embed", "tc_count = 0 level_count = 0 country_count = 0 profile_count", "you can't run this command\") else: raise error def setup(bot):", "{valor_count}', f'Members with IGN set: {ign_count}', f'Members with TC set:", "@stats.error async def permission_error(self, ctx, error): if isinstance(error, commands.MissingPermissions): await", "\"ign\": ign_count += 1 if role.name == \"tc\": tc_count +=", "discord class Stats(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command()", "valor_count = 0 ign_count = 0 tc_count = 0 level_count", "== \"level\": level_count += 1 if role.name == \"country\": country_count", "= 0 mystic_count = 0 valor_count = 0 ign_count =", "+= 1 if role.name == \"valor\": valor_count += 1 if", "+= 1 if role.name == \"instinct\": instinct_count += 1 if", "= [f'Members: {member_count}', f'Members Role: {member_role_count}', f'Members on Team Instinct:", "for role in member.roles: if role.name == \"Member\": member_role_count +=", "0 profile_count = 0 for member in members: if not", "\"valor\": valor_count += 1 if role.name == \"ign\": ign_count +=", "+= 1 if role.name == \"profile\": profile_count += 1 values", "members: if not member.bot: member_count += 1 for role in", "Valor: {valor_count}', f'Members with IGN set: {ign_count}', f'Members 
with TC", "= 0 instinct_count = 0 mystic_count = 0 valor_count =", "role.name == \"tc\": tc_count += 1 if role.name == \"level\":", "set: {level_count}', f'Members with country set: {country_count}', f'Members with completed", "1 for role in member.roles: if role.name == \"Member\": member_role_count", "on Team Valor: {valor_count}', f'Members with IGN set: {ign_count}', f'Members", "0 member_role_count = 0 instinct_count = 0 mystic_count = 0", "stats(self, ctx): members = await ctx.guild.fetch_members(limit=None).flatten() member_count = 0 member_role_count", "@commands.has_permissions(manage_channels=True) async def stats(self, ctx): members = await ctx.guild.fetch_members(limit=None).flatten() member_count", "0 tc_count = 0 level_count = 0 country_count = 0", "\"instinct\": instinct_count += 1 if role.name == \"mystic\": mystic_count +=", "f'Members Role: {member_role_count}', f'Members on Team Instinct: {instinct_count}', f'Members on", "with IGN set: {ign_count}', f'Members with TC set: {tc_count}', f'Members", "== \"country\": country_count += 1 if role.name == \"profile\": profile_count", "1 if role.name == \"valor\": valor_count += 1 if role.name", "def permission_error(self, ctx, error): if isinstance(error, commands.MissingPermissions): await ctx.send(\"Sorry, you", "0 country_count = 0 profile_count = 0 for member in", "permission_error(self, ctx, error): if isinstance(error, commands.MissingPermissions): await ctx.send(\"Sorry, you can't", "in members: if not member.bot: member_count += 1 for role", "== \"ign\": ign_count += 1 if role.name == \"tc\": tc_count", "embed.add_field(name='Server Stats:', value='\\n'.join(values), inline=False) await ctx.send(embed=embed) @stats.error async def permission_error(self,", "{profile_count}'] embed = discord.Embed(color=discord.Color.green()) embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url) embed.add_field(name='Server Stats:', value='\\n'.join(values), inline=False)", "with TC set: {tc_count}', 
f'Members with level set: {level_count}', f'Members" ]
[ "'.join(value for key, value in readonly_copy.stream()) assert values == 'test", "values == '' b.close_writer(do_sync=False) filename = prefix + '.mrsx' path", "This is' b.close_writer(do_sync=False) filename = prefix + '.mrsb' path =", "format=HexWriter) prefix = b.prefix() assert prefix == 'source_7_split_1_' listdir =", "'This'), (2, 'is')], write_only=True) values = ' '.join(value for key,", "values = ' '.join(value for key, value in b) assert", "= tmpdir.listdir() assert listdir == [] b.addpair((1, 2)) filename =", "= b.readonly_copy() assert readonly_copy.url == path values = ' '.join(value", "= tmpdir.listdir() assert listdir == [path] readonly_copy = b.readonly_copy() assert", "= tmpdir.listdir() assert listdir == [] b.addpair((4, 'test')) b.collect([(3, 'a'),", "assert values == 'test a This is' b.sort() values =", "readonly_copy = b.readonly_copy() assert readonly_copy.url == path def test_roundtrip(tmpdir): b", "WriteBucket(7, 1, dir=tmpdir.strpath, format=HexWriter) prefix = b.prefix() assert prefix ==", "prefix == 'source_2_split_4_' listdir = tmpdir.listdir() assert listdir == []", "value in b) assert values == '' readonly_copy = b.readonly_copy()", "b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter) prefix = b.prefix() assert", "= tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert listdir == [path] readonly_copy", "a test' def test_write_only(): b = WriteBucket(0, 0) b.addpair((4, 'test'),", "def test_writebucket(): b = WriteBucket(0, 0) b.addpair((4, 'test')) b.collect([(3, 'a'),", "filename = prefix + '.mrsx' path = tmpdir.join(filename).strpath listdir =", "listdir = tmpdir.listdir() assert listdir == [] # vim: et", "listdir == [] def test_roundtrip_write_only(tmpdir): b = WriteBucket(7, 1, dir=tmpdir.strpath,", "is' b.sort() values = ' '.join(value for key, value in", "is' values = ' '.join(value for key, value in readonly_copy.stream())", "'.mrsx' path = tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert 
listdir ==", "'test a This is' b.sort() values = ' '.join(value for", "b.readonly_copy() assert readonly_copy.url == path def test_roundtrip(tmpdir): b = WriteBucket(2,", "[] b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1, 'This'), (2, 'is')],", "b) assert values == 'test a This is' b.close_writer(do_sync=False) filename", "WriteBucket from mrs import BinWriter, HexWriter def test_writebucket(): b =", "This is' b.sort() values = ' '.join(value for key, value", "'is')], write_only=True) values = ' '.join(value for key, value in", "values == '' readonly_copy = b.readonly_copy() assert readonly_copy.url is None", "= WriteBucket(7, 1, dir=tmpdir.strpath, format=HexWriter) prefix = b.prefix() assert prefix", "= prefix + '.mrsb' path = tmpdir.join(filename).strpath listdir = tmpdir.listdir()", "'' values = ' '.join(value for key, value in readonly_copy.stream())", "filename = prefix + '.mrsb' path = tmpdir.join(filename).strpath listdir =", "mrs.bucket import WriteBucket from mrs import BinWriter, HexWriter def test_writebucket():", "is a test' def test_write_only(): b = WriteBucket(0, 0) b.addpair((4,", "'.mrsb' path = tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert listdir ==", "== 'source_2_split_4_' listdir = tmpdir.listdir() assert listdir == [] b.addpair((1,", "= ' '.join(value for key, value in readonly_copy.stream()) assert values", "readonly_copy) assert values == '' values = ' '.join(value for", "None def test_writing(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter) prefix", "values = ' '.join(value for key, value in readonly_copy.stream()) assert", "test' def test_write_only(): b = WriteBucket(0, 0) b.addpair((4, 'test'), write_only=True)", "= b.prefix() assert prefix == 'source_7_split_1_' listdir = tmpdir.listdir() assert", "'is')]) values = ' '.join(value for key, value in b)", "key, value in b) assert values == '' readonly_copy =", "'This'), (2, 'is')]) values = ' '.join(value for key, value", 
"tmpdir.listdir() assert listdir == [] def test_roundtrip_write_only(tmpdir): b = WriteBucket(7,", "'' b.close_writer(do_sync=False) filename = prefix + '.mrsx' path = tmpdir.join(filename).strpath", "format=BinWriter) prefix = b.prefix() assert prefix == 'source_2_split_4_' listdir =", "b.clean() listdir = tmpdir.listdir() assert listdir == [] def test_roundtrip_write_only(tmpdir):", "HexWriter def test_writebucket(): b = WriteBucket(0, 0) b.addpair((4, 'test')) b.collect([(3,", "'' readonly_copy = b.readonly_copy() assert readonly_copy.url is None def test_writing(tmpdir):", "listdir = tmpdir.listdir() assert listdir == [] b.addpair((4, 'test'), write_only=True)", "a This is' b.close_writer(do_sync=False) filename = prefix + '.mrsb' path", "b.readonly_copy() assert readonly_copy.url == path values = ' '.join(value for", "readonly_copy) assert values == 'test a This is' values =", "'.join(value for key, value in b) assert values == ''", "assert readonly_copy.url is None def test_writing(tmpdir): b = WriteBucket(2, 4,", "listdir = tmpdir.listdir() assert listdir == [path] readonly_copy = b.readonly_copy()", "b) assert values == 'test a This is' b.sort() values", "= tmpdir.listdir() assert listdir == [] def test_roundtrip_write_only(tmpdir): b =", "== 'test a This is' b.sort() values = ' '.join(value", "assert values == 'This is a test' def test_write_only(): b", "assert listdir == [] b.addpair((1, 2)) filename = prefix +", "listdir == [] b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'), (2,", "b.addpair((1, 2)) filename = prefix + '.mrsb' path = tmpdir.join(filename).strpath", "tmpdir.listdir() assert listdir == [] b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'),", "b) assert values == 'This is a test' def test_write_only():", "key, value in readonly_copy.stream()) assert values == 'test a This", "== 'test a This is' b.clean() listdir = tmpdir.listdir() assert", "tmpdir.listdir() assert listdir == [path] readonly_copy = b.readonly_copy() assert 
readonly_copy.url", "assert values == 'test a This is' b.close_writer(do_sync=False) filename =", "listdir = tmpdir.listdir() assert listdir == [] def test_roundtrip_write_only(tmpdir): b", "assert values == '' b.close_writer(do_sync=False) filename = prefix + '.mrsx'", "= WriteBucket(0, 0) b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1, 'This'),", "key, value in b) assert values == 'test a This", "== '' b.close_writer(do_sync=False) filename = prefix + '.mrsx' path =", "== '' values = ' '.join(value for key, value in", "[] b.addpair((1, 2)) filename = prefix + '.mrsb' path =", "== [] b.addpair((1, 2)) filename = prefix + '.mrsb' path", "for key, value in b) assert values == 'test a", "b.collect([(3, 'a'), (1, 'This'), (2, 'is')], write_only=True) values = '", "b.clean() listdir = tmpdir.listdir() assert listdir == [] # vim:", "= b.prefix() assert prefix == 'source_2_split_4_' listdir = tmpdir.listdir() assert", "[] def test_roundtrip_write_only(tmpdir): b = WriteBucket(7, 1, dir=tmpdir.strpath, format=HexWriter) prefix", "== 'source_2_split_4_' listdir = tmpdir.listdir() assert listdir == [] b.addpair((4,", "= ' '.join(value for key, value in readonly_copy) assert values", "in b) assert values == '' b.close_writer(do_sync=False) filename = prefix", "'.join(value for key, value in readonly_copy) assert values == ''", "prefix = b.prefix() assert prefix == 'source_2_split_4_' listdir = tmpdir.listdir()", "'This is a test' def test_write_only(): b = WriteBucket(0, 0)", "readonly_copy.stream()) assert values == 'test a This is' b.clean() listdir", "from mrs import BinWriter, HexWriter def test_writebucket(): b = WriteBucket(0,", "for key, value in b) assert values == '' readonly_copy", "This is' b.clean() listdir = tmpdir.listdir() assert listdir == []", "for key, value in readonly_copy) assert values == '' values", "assert readonly_copy.url == path def test_roundtrip(tmpdir): b = WriteBucket(2, 4,", "def test_writing(tmpdir): b = WriteBucket(2, 4, 
dir=tmpdir.strpath, format=BinWriter) prefix =", "[path] readonly_copy = b.readonly_copy() assert readonly_copy.url == path def test_roundtrip(tmpdir):", "'source_7_split_1_' listdir = tmpdir.listdir() assert listdir == [] b.addpair((4, 'test'),", "in b) assert values == 'This is a test' def", "in readonly_copy) assert values == 'test a This is' values", "b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'), (2, 'is')]) values =", "readonly_copy.url == path def test_roundtrip(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath,", "in readonly_copy.stream()) assert values == 'test a This is' b.clean()", "listdir == [path] readonly_copy = b.readonly_copy() assert readonly_copy.url == path", "value in readonly_copy.stream()) assert values == 'test a This is'", "(2, 'is')]) values = ' '.join(value for key, value in", "b.sort() values = ' '.join(value for key, value in b)", "BinWriter, HexWriter def test_writebucket(): b = WriteBucket(0, 0) b.addpair((4, 'test'))", "b = WriteBucket(0, 0) b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'),", "[path] readonly_copy = b.readonly_copy() assert readonly_copy.url == path values =", "== [] b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1, 'This'), (2,", "path def test_roundtrip(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter) prefix", "= WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter) prefix = b.prefix() assert prefix", "b.close_writer(do_sync=False) filename = prefix + '.mrsx' path = tmpdir.join(filename).strpath listdir", "test_writebucket(): b = WriteBucket(0, 0) b.addpair((4, 'test')) b.collect([(3, 'a'), (1,", "b.collect([(3, 'a'), (1, 'This'), (2, 'is')]) values = ' '.join(value", "values == 'test a This is' b.close_writer(do_sync=False) filename = prefix", "4, dir=tmpdir.strpath, format=BinWriter) prefix = b.prefix() assert prefix == 'source_2_split_4_'", "' '.join(value for key, value in readonly_copy) assert values ==", "assert values == '' values = ' '.join(value for 
key,", "prefix = b.prefix() assert prefix == 'source_7_split_1_' listdir = tmpdir.listdir()", "write_only=True) values = ' '.join(value for key, value in b)", "readonly_copy.url is None def test_writing(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath,", "values == '' values = ' '.join(value for key, value", "'test a This is' values = ' '.join(value for key,", "prefix + '.mrsb' path = tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert", "values == 'test a This is' b.clean() listdir = tmpdir.listdir()", "== 'This is a test' def test_write_only(): b = WriteBucket(0,", "assert values == '' readonly_copy = b.readonly_copy() assert readonly_copy.url is", "== path def test_roundtrip(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter)", "test_roundtrip(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter) prefix = b.prefix()", "WriteBucket(0, 0) b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1, 'This'), (2,", "is None def test_writing(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter)", "== '' readonly_copy = b.readonly_copy() assert readonly_copy.url is None def", "dir=tmpdir.strpath, format=HexWriter) prefix = b.prefix() assert prefix == 'source_7_split_1_' listdir", "0) b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'), (2, 'is')]) values", "assert values == 'test a This is' values = '", "listdir = tmpdir.listdir() assert listdir == [] b.addpair((1, 2)) filename", "assert values == 'test a This is' b.clean() listdir =", "key, value in b) assert values == '' b.close_writer(do_sync=False) filename", "write_only=True) b.collect([(3, 'a'), (1, 'This'), (2, 'is')], write_only=True) values =", "' '.join(value for key, value in readonly_copy.stream()) assert values ==", "for key, value in readonly_copy) assert values == 'test a", "b.close_writer(do_sync=False) filename = prefix + '.mrsb' path = tmpdir.join(filename).strpath listdir", "(1, 'This'), (2, 'is')], write_only=True) values = ' 
'.join(value for", "assert prefix == 'source_7_split_1_' listdir = tmpdir.listdir() assert listdir ==", "'test a This is' b.close_writer(do_sync=False) filename = prefix + '.mrsb'", "b.prefix() assert prefix == 'source_7_split_1_' listdir = tmpdir.listdir() assert listdir", "prefix + '.mrsx' path = tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert", "tmpdir.listdir() assert listdir == [] # vim: et sw=4 sts=4", "value in readonly_copy) assert values == '' values = '", "tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert listdir == [path] readonly_copy =", "def test_roundtrip_write_only(tmpdir): b = WriteBucket(7, 1, dir=tmpdir.strpath, format=HexWriter) prefix =", "'source_2_split_4_' listdir = tmpdir.listdir() assert listdir == [] b.addpair((4, 'test'))", "' '.join(value for key, value in b) assert values ==", "a This is' b.sort() values = ' '.join(value for key,", "assert listdir == [] def test_roundtrip_write_only(tmpdir): b = WriteBucket(7, 1,", "b) assert values == '' b.close_writer(do_sync=False) filename = prefix +", "[] b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'), (2, 'is')]) values", "listdir == [] b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1, 'This'),", "== path values = ' '.join(value for key, value in", "assert listdir == [] b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'),", "b = WriteBucket(0, 0) b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1,", "b) assert values == '' readonly_copy = b.readonly_copy() assert readonly_copy.url", "WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter) prefix = b.prefix() assert prefix ==", "b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1, 'This'), (2, 'is')], write_only=True)", "== [path] readonly_copy = b.readonly_copy() assert readonly_copy.url == path def", "in b) assert values == 'test a This is' b.sort()", "value in b) assert values == 'test a This is'", "in readonly_copy) assert values == '' values = ' '.join(value", 
"import BinWriter, HexWriter def test_writebucket(): b = WriteBucket(0, 0) b.addpair((4,", "'test')) b.collect([(3, 'a'), (1, 'This'), (2, 'is')]) values = '", "0) b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1, 'This'), (2, 'is')],", "2)) filename = prefix + '.mrsb' path = tmpdir.join(filename).strpath listdir", "= WriteBucket(0, 0) b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'), (2,", "a This is' b.clean() listdir = tmpdir.listdir() assert listdir ==", "listdir == [] b.addpair((1, 2)) filename = prefix + '.mrsb'", "'test a This is' b.clean() listdir = tmpdir.listdir() assert listdir", "mrs import BinWriter, HexWriter def test_writebucket(): b = WriteBucket(0, 0)", "'a'), (1, 'This'), (2, 'is')], write_only=True) values = ' '.join(value", "'.join(value for key, value in readonly_copy) assert values == 'test", "key, value in readonly_copy) assert values == 'test a This", "= tmpdir.listdir() assert listdir == [] # vim: et sw=4", "== 'test a This is' b.close_writer(do_sync=False) filename = prefix +", "assert listdir == [path] readonly_copy = b.readonly_copy() assert readonly_copy.url ==", "'test'), write_only=True) b.collect([(3, 'a'), (1, 'This'), (2, 'is')], write_only=True) values", "assert prefix == 'source_2_split_4_' listdir = tmpdir.listdir() assert listdir ==", "assert readonly_copy.url == path values = ' '.join(value for key,", "path values = ' '.join(value for key, value in readonly_copy)", "prefix == 'source_7_split_1_' listdir = tmpdir.listdir() assert listdir == []", "values == 'test a This is' b.sort() values = '", "import WriteBucket from mrs import BinWriter, HexWriter def test_writebucket(): b", "This is' values = ' '.join(value for key, value in", "listdir = tmpdir.listdir() assert listdir == [] b.addpair((4, 'test')) b.collect([(3,", "in b) assert values == 'test a This is' b.close_writer(do_sync=False)", "== 'source_7_split_1_' listdir = tmpdir.listdir() assert listdir == [] b.addpair((4,", "is' 
b.close_writer(do_sync=False) filename = prefix + '.mrsb' path = tmpdir.join(filename).strpath", "is' b.clean() listdir = tmpdir.listdir() assert listdir == [] def", "= tmpdir.listdir() assert listdir == [] b.addpair((4, 'test'), write_only=True) b.collect([(3,", "is' b.clean() listdir = tmpdir.listdir() assert listdir == [] #", "readonly_copy = b.readonly_copy() assert readonly_copy.url is None def test_writing(tmpdir): b", "'.join(value for key, value in b) assert values == 'test", "for key, value in b) assert values == '' b.close_writer(do_sync=False)", "key, value in readonly_copy) assert values == '' values =", "'.join(value for key, value in b) assert values == 'This", "readonly_copy = b.readonly_copy() assert readonly_copy.url == path values = '", "from mrs.bucket import WriteBucket from mrs import BinWriter, HexWriter def", "(1, 'This'), (2, 'is')]) values = ' '.join(value for key,", "values == 'This is a test' def test_write_only(): b =", "== [] b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'), (2, 'is')])", "b.prefix() assert prefix == 'source_2_split_4_' listdir = tmpdir.listdir() assert listdir", "= prefix + '.mrsx' path = tmpdir.join(filename).strpath listdir = tmpdir.listdir()", "values == 'test a This is' values = ' '.join(value", "= ' '.join(value for key, value in b) assert values", "readonly_copy.url == path values = ' '.join(value for key, value", "a This is' values = ' '.join(value for key, value", "dir=tmpdir.strpath, format=BinWriter) prefix = b.prefix() assert prefix == 'source_2_split_4_' listdir", "test_write_only(): b = WriteBucket(0, 0) b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'),", "== [path] readonly_copy = b.readonly_copy() assert readonly_copy.url == path values", "def test_write_only(): b = WriteBucket(0, 0) b.addpair((4, 'test'), write_only=True) b.collect([(3,", "b.readonly_copy() assert readonly_copy.url is None def test_writing(tmpdir): b = WriteBucket(2,", "== [] def test_roundtrip_write_only(tmpdir): b = 
WriteBucket(7, 1, dir=tmpdir.strpath, format=HexWriter)", "for key, value in readonly_copy.stream()) assert values == 'test a", "+ '.mrsx' path = tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert listdir", "def test_roundtrip(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter) prefix =", "path = tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert listdir == [path]", "(2, 'is')], write_only=True) values = ' '.join(value for key, value", "b = WriteBucket(7, 1, dir=tmpdir.strpath, format=HexWriter) prefix = b.prefix() assert", "WriteBucket(0, 0) b.addpair((4, 'test')) b.collect([(3, 'a'), (1, 'This'), (2, 'is')])", "value in readonly_copy) assert values == 'test a This is'", "= b.readonly_copy() assert readonly_copy.url is None def test_writing(tmpdir): b =", "1, dir=tmpdir.strpath, format=HexWriter) prefix = b.prefix() assert prefix == 'source_7_split_1_'", "key, value in b) assert values == 'This is a", "'a'), (1, 'This'), (2, 'is')]) values = ' '.join(value for", "'source_2_split_4_' listdir = tmpdir.listdir() assert listdir == [] b.addpair((1, 2))", "test_writing(tmpdir): b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter) prefix = b.prefix()", "values = ' '.join(value for key, value in readonly_copy) assert", "+ '.mrsb' path = tmpdir.join(filename).strpath listdir = tmpdir.listdir() assert listdir", "tmpdir.listdir() assert listdir == [] b.addpair((1, 2)) filename = prefix", "test_roundtrip_write_only(tmpdir): b = WriteBucket(7, 1, dir=tmpdir.strpath, format=HexWriter) prefix = b.prefix()", "== 'test a This is' values = ' '.join(value for", "in b) assert values == '' readonly_copy = b.readonly_copy() assert", "value in b) assert values == '' b.close_writer(do_sync=False) filename =", "value in b) assert values == 'This is a test'", "assert listdir == [] b.addpair((4, 'test'), write_only=True) b.collect([(3, 'a'), (1,", "for key, value in b) assert values == 'This is", "= b.readonly_copy() assert readonly_copy.url 
== path def test_roundtrip(tmpdir): b =", "tmpdir.listdir() assert listdir == [] b.addpair((4, 'test')) b.collect([(3, 'a'), (1," ]
[ "to this point. DemPref utilizes only the most recent \"\"\"", "descending order by time of creation: all_files = np.array(list(Path.iterdir(data_path))) all_csvs", "a \"potential term\" defined as an \"additional # tensor...to be", "domain: Environment, feedback: list ) -> np.ndarray: \"\"\"Update the model's", "bool, update_func: str, beta_pref: float, ) -> None: \"\"\" Initialize", "= [phi[rank] - phi[rank]], [] for key in sorted(phi.keys()): if", "domain.features_from_trajectory(x.trajectory) for x in self.demos ] self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos = self.demos", "model logp\"(PyMC3 developer # guide). In this instance, the potential", "maximum volume removal objective function. Note: This objective uses the", "the sampler.\"\"\" self.phi_prefs = [] def sample(self, N: int, T:", "np.ndarray, w_samples: np.ndarray ) -> float: \"\"\" Approximate the maximum", ") query_diff = max(query_diffs) query = Query( query_type=Preference, task=None, start_state=query_state,", ".csv and convert it to a dictionary: chosen_csv = sorted_csvs[-which_csv]", "instantiate the agent according to arguments corresponding to what the", "-objective(features_each_q_option, w_samples) elif self.update_func == \"approx\": return -approx_objective(features_each_q_option, w_samples) else:", "rankings: exp_rewards_sorted = [None] * len(rank) for i in range(len(rank)):", "\"pick_best\" or self.update_func == \"approx\" or self.update_func == \"rank\" ),", "# Currently must be False ) assert ( self.generate_scenario is", "self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref = self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher = self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If we want", "sampler and query generator: \"\"\" self._sampler = None self._w_samples =", "1.0), -100, # -np.inf, self.update_function(w), ) return result try: #", "self.query_option_count ) ] ) ) for i in range(len(self.phi_prefs)) ]", "= 0 
self._query_index = 0 self._w_dim = w_dim assert (", "self.w_samples def reset(self) -> None: \"\"\"Prepare for new query session.\"\"\"", "import Dict, List import arviz as az from inquire.agents.agent import", "= visualize \"\"\" Get the pre-defined agent parameters \"\"\" self._dempref_agent_parameters", "print(\"Generating query_options\") while query_diff <= self.epsilon: if self.incl_prev_query: if last_query_choice.null:", "query_options_trajectories = [ Trajectory(raw_trajectories[i], raw_phis[i]) for i in range(len(raw_trajectories)) ]", "* lower_input_bound, high=self.num_new_queries * upper_input_bound, size=(self.num_new_queries * z), ), args=(self.domain,", "\"\"\" data_path = Path.cwd() / Path(\"../inquire/agents/\") # Sort the .csvs", "\"\"\"Read an agent-parameterization .csv. ::inputs: :creation_index: A time-descending .csv file", "i in range(len(rank)): result[i] = phi[rank[i]] elif self.update_func == \"approx\":", "{k: features[k] for k in range(len(query_options))} self._sampler.load_prefs(phi, choice_index) self.w_samples =", "raise Exception( update_func + \" is not a valid update", "objective. :param features: the feature values of each query option", "except ( pm.SamplingError, pm.parallel_sampling.ParallelSamplingError, ): return None return trace class", "want to generate the scenario -- i.e., other agents' behavior", "self.domain.features_from_trajectory(t) for t in raw_trajectories ] query_options_trajectories = [ Trajectory(raw_trajectories[i],", "self._interaction_types = interaction_types self._visualize = visualize \"\"\" Get the pre-defined", "get_trace(self, test_val: np.ndarray) -> az.InferenceData: \"\"\"Create an MCMC trace.\"\"\" #", "= sorted(all_csvs, key=os.path.getmtime) sorted_csvs = [Path(c) for c in sorted_csvs]", "feedback received to this point. 
DemPref utilizes only the most", "1.0 - 1.0 / np.sum( np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed)", ") return result elif self.update_func == \"rank\": def update_function(distribution): result", "in inquire/tests/evaluation.py and the respective domain. \"\"\" self._weight_sample_count = weight_sample_count", "d in self.cleaned_demos] class DemPrefSampler: \"\"\"Sample trajectories for querying. Code", "self.include_previous_query = include_previous_query self.generate_scenario = ( generate_scenario # Currently must", "(queries) \"\"\" start = time.perf_counter() def func(controls: np.ndarray, *args) ->", "pandas as pd import pymc3 as pm import pymc3.distributions.transforms as", "many samples before the chain converges; these initial samples are", "of weights and # features; this difference is a trait", "to contain the control input for all queries :param args:", "self.n_samples_exp, self.beta_demo, self.beta_pref, self.beta_teacher, ] self.df = pd.DataFrame(columns=[\"run #\", \"pref_iter\",", "/ w_samples.shape[0] ) # 1 x 1 -- summed across", "# features: query_option_count x feature_size # w_samples: n_samples x feature_size", "az.plot_energy(trace) plt.show() input(\"Press enter to continue\") az.plot_posterior(trace) plt.show() input(\"Press enter", "concatenated to contain the control input for all queries :param", "# query_option_count x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T), axis=1)", "1\" assert ( num_expectation_samples >= 1 ), \"QueryGenerator.__init__: num_expectation_samples must", "irrationality of teacher in providing demonstrations :param beta_pref: parameter measuring", "= trajectory_sample_count self._trajectory_length = trajectory_length self._interaction_types = interaction_types self._visualize =", "verbose: bool = False, ) -> list: \"\"\"Generate query using", "= np.random.randint(len(self.demos)) else: self.random_scenario_index = 0 last_query_choice = 
self.all_query_choices[ self.random_scenario_index", "if self.incl_prev_query: self.all_query_choices = [d for d in self.cleaned_demos] class", "is last_query_choice is blank. (Only True if not using Dempref", "Dempref but using incl_prev_) :return: a list of trajectories (queries)", "This objective uses the Plackett-Luce model of teacher behavior. CANNOT", ":param w_samples: Samples of w :param last_query_choice: The previously selected", "True is last_query_choice is blank. (Only True if not using", ": (i + 1) * z] for i in range(self.num_new_queries)", ") volumes_removed.append(1 - value) return np.min(volumes_removed) # The following optimization", "df = pd.read_csv(chosen_csv) params_dict = df.to_dict() return params_dict def process_demonstrations(", "\"\"\" def __init__( self, query_option_count: int, dim_features: int, update_func: str", "w_samples), bounds=self.domain.control_bounds * self.num_new_queries * self.trajectory_length, approx_grad=True, ) query_options_controls =", "scipy.optimize as opt import theano.tensor as tt class DemPref(Agent): \"\"\"A", "self.w_samples, blank_traj=True ) else: query_options = self._query_generator.generate_query_options( self.w_samples, last_query_choice )", "-> np.ndarray: \"\"\"Randomly initialize weights for gradient descent.\"\"\" self.reset() return", "= trajectory_length self.num_expectation_samples = num_expectation_samples self.include_previous_query = include_previous_query self.generate_scenario =", "== \".csv\" for f in all_files]) ] all_csvs = np.array([str(f[0]).strip()", "Environment from inquire.interactions.feedback import Query, Trajectory from inquire.interactions.modalities import Preference", "norm <= 1: break # Get a sampling trace (and", "np.ndarray) -> float: \"\"\" Maximize the volume removal objective. :param", "use approximation to update function if query_option_count > 2\" elif", "for MCMC. 
NOTE the DemPref codebase creates a sampler via", "> 0 ), \"Cannot include previous query if no demonstration", "assert ( trajectory_length >= 1 ), \"QueryGenerator.__init__: trajectory_length must be", "self.phi_prefs.append(np.array(result)) def clear_pref(self): \"\"\"Clear all preference information from the sampler.\"\"\"", "if self.update_func == \"approx\": def update_function(distribution): result = tt.sum( [", "* upper_input_bound, size=(self.num_new_queries * z), ), args=(self.domain, w_samples), bounds=self.domain.control_bounds *", "provided options\") if self.incl_prev_query and self.teacher_type == \"term\": assert (", "as pm import pymc3.distributions.transforms as tr import scipy.optimize as opt", "Initialize the approx query generation. Note: this class generates queries", "np.ndarray, last_query_choice: Trajectory = None, blank_traj: bool = False, )", "\"rank\", \"pick_best\", and \"approx\". To use \"approx\", query_option_count must be", "self, dom: Environment, num_queries: int, trajectory_length: int, num_expectation_samples: int, include_previous_query:", "most recently created. \"\"\" data_path = Path.cwd() / Path(\"../inquire/agents/\") #", "received to this point. DemPref utilizes only the most recent", "-> float: \"\"\" The ranking maximum volume removal objective function.", "weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T)) / w_samples.shape[0] ) # 1", "is False ), \"Cannot generate scenario when using approximate gradients\"", "np.ndarray ) -> float: \"\"\" Approximate the maximum volume removal", "[ controls[i * z : (i + 1) * z]", "\"approx\" or self.update_func == \"rank\" ), (\"Update\" \" function must", "adapted from original DemPref agent. 
\"\"\" def __init__( self, query_option_count:", "1 -- summed across w_samples volumes_removed = [] rankings =", "to save data as they did in DemPref:\"\"\" self.first_q_session =", "\" is not a valid update function.\" ) # feature", "num_queries self.trajectory_length = trajectory_length self.num_expectation_samples = num_expectation_samples self.include_previous_query = include_previous_query", "sampler via PyMC3 version 3.5; this codebase adapts their model", "a domain's start state; that's handled in inquire/tests/evaluation.py and the", "by time of creation: all_files = np.array(list(Path.iterdir(data_path))) all_csvs = all_files[", "blank_traj: features_each_q_option = np.append( features_each_q_option, domain.features_from_trajectory(last_query_choice), axis=1, ) if self.update_func", "- exp_rewards_sorted[i] ) ) ) volumes_removed.append(1 - value) return np.min(volumes_removed)", "as np import pandas as pd import pymc3 as pm", "optimization is w.r.t. the linear combination of weights and #", "summed across w_samples volumes_removed = [] rankings = itertools.permutations( list(range(self.num_queries))", "0 self._query_index = 0 self._w_dim = w_dim assert ( self.update_func", "trajectories for querying. Code adapted from original DemPref agent. \"\"\"", ":start_state: The state from which a trajectory begins. \"\"\" assert", "choice = feedback[-1].choice.selection choice_index = query_options.index(choice) if self.incl_prev_query: self.all_query_choices[self.random_scenario_index] =", "will throw an assertion error otherwise :param beta_demo: parameter measuring", ":param update_func: options are \"rank\", \"pick_best\", and \"approx\". 
To use", "\"approx\", and \"rank\" :beta_pref: the rationality parameter for the teacher", "as plt import numpy as np import pandas as pd", "feedback: list ) -> np.ndarray: \"\"\"Update the model's learned weights.", "of creation: all_files = np.array(list(Path.iterdir(data_path))) all_csvs = all_files[ np.argwhere([f.suffix ==", "self.epsilon = self._dempref_agent_parameters[\"epsilon\"][0] \"\"\" Instantiate the DemPref-specific sampler and query", "1\" assert ( trajectory_length >= 1 ), \"QueryGenerator.__init__: trajectory_length must", "= trajectories phi_demos = [ domain.features_from_trajectory(x.trajectory) for x in self.demos", "model for MCMC. NOTE the DemPref codebase creates a sampler", "terms in PL-update -tt.log( [ tt.sum( # sum down different", "to the model logp\"(PyMC3 developer # guide). In this instance,", "query ::Inquire-specific inputs: :start_state: The state from which a trajectory", "\"\"\" import itertools import os import time from pathlib import", "demonstrations and preferences. Code adapted from Learning Reward Functions by", "trajectories (queries) \"\"\" start = time.perf_counter() def func(controls: np.ndarray, *args)", "evaluated on the given queries' features \"\"\" if features.shape[0] >", "range(self.num_new_queries) ] end = time.perf_counter() print(f\"Finished computing queries in {end", "the length of each query :num_expectation_samples: number of w_samples to", "rv_x = pm.Uniform( name=\"rv_x\", shape=self.dim_features, lower=-1, upper=1, testval=test_val, ) #", "N samples from the distribution. 
The distribution is defined by", "DemPref:\"\"\" self.first_q_session = True self.q_session_index = 0 self.query_index = 0", "assert ( num_queries >= 1 ), \"QueryGenerator.__init__: num_queries must be", "corresponding to what the the original paper's codebase designates as", "\"pick_best\" ): raise Exception( update_func + \" is not a", "== \"pick_best\": result, tmp = [phi[rank] - phi[rank]], [] for", "None self._w_samples = None self._query_generator = None self._first_q_session = True", "measuring irrationality of teacher in providing demonstrations :param beta_pref: parameter", ") # query_option_count x 1 -- summed across w_samples v_removed", "from inquire.environments.environment import Environment from inquire.interactions.feedback import Query, Trajectory from", "opt.fmin_l_bfgs_b( func, x0=np.random.uniform( low=self.num_new_queries * lower_input_bound, high=self.num_new_queries * upper_input_bound, size=(self.num_new_queries", "bounds=self.domain.control_bounds * self.num_new_queries * self.trajectory_length, approx_grad=True, ) query_options_controls = [", "( self.query_option_count == 2 ), \"Cannot use approximation to update", "= self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count = self._dempref_agent_parameters[ \"query_option_count\" ][0] self.update_func = self._dempref_agent_parameters[\"update_func\"][0]", "\"type\", \"value\"] ), ignore_index=True, ) def generate_query( self, domain: Environment,", "self.phi_demos = phi_demos def load_prefs(self, phi: Dict, rank): \"\"\" Load", "- phi[rank]], [] for key in sorted(phi.keys()): if key !=", "from rankings to query-option features; # load into sampler: features", "result = tt.sum( [ -tt.log( tt.sum( tt.exp( self.beta_pref * tt.dot(", "return self.w_samples else: # Use the most recent Choice in", "result = tt.switch( pm.math.gt(w_sum, 1.0), -100, # -np.inf, self.update_function(w), )", "= pm.Potential(\"sphere\", sphere(rv_x)) trace = pm.sample( 10000, tune=5000, 
return_inferencedata=True, init=\"adapt_diag\",", "boolean for whether we want to generate the scenario --", "= domain.features_from_trajectory( c, controls_as_input=True ) if self.include_previous_query and not blank_traj:", "exp_rewards_sorted = [None] * len(rank) for i in range(len(rank)): exp_rewards_sorted[rank[i]]", ") else: query_options = self._query_generator.generate_query_options( self.w_samples ) query_diffs = []", "generate at each time step :trajectory_length: the length of each", "w_samples to use in approximating the objective function :include_previous_query: boolean", "needn't maintain a domain's start state; that's handled in inquire/tests/evaluation.py", "[] def load_demo(self, phi_demos: np.ndarray): \"\"\" Load the demonstrations into", "initialize_weights(self, domain: Environment) -> np.ndarray: \"\"\"Randomly initialize weights for gradient", "map from rankings to query-option features; # load into sampler:", "from the samples: mean_w = np.mean(self.w_samples, axis=0) mean_w = mean_w", ") + tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ) return result", ":dom: the domain to generate queries on :num_queries: number of", "] ) ) for i in range(len(self.phi_prefs)) ] ) +", "set of controls \"\"\" domain = args[0] w_samples = args[1]", "if not using Dempref but using incl_prev_) :return: a list", "\"\"\"Define model for MCMC. NOTE the DemPref codebase creates a", "query_options_controls ] raw_phis = [ self.domain.features_from_trajectory(t) for t in raw_trajectories", "np.ndarray: \"\"\"Return N samples from the distribution. 
The distribution is", "the DemPref codebase creates a sampler via PyMC3 version 3.5;", "if trace is not None: break if self._visualize: az.plot_trace(trace) plt.show()", "must be at least 1\" assert ( trajectory_length >= 1", "update function: if self.update_func == \"approx\": def update_function(distribution): result =", "self._sampler = None self._w_samples = None self._query_generator = None self._first_q_session", "update_func self.beta_pref = beta_pref self.num_new_queries = ( self.num_queries - 1", "different queries [ tt.sum( # sum across different terms in", "#\", \"pref_iter\", \"type\", \"value\"]) def initialize_weights(self, domain: Environment) -> np.ndarray:", "self.gen_scenario = self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters = self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon = self._dempref_agent_parameters[\"epsilon\"][0] \"\"\"", "for each demonstration; has dimension n_dem -by- self.dim_features \"\"\" self.phi_demos", "clear_pref(self): \"\"\"Clear all preference information from the sampler.\"\"\" self.phi_prefs =", "*args) -> float: \"\"\"Minimize via L_BFGS. :param controls: an array,", "distribution) ) return result elif self.update_func == \"rank\": def update_function(distribution):", "= num_queries self.trajectory_length = trajectory_length self.num_expectation_samples = num_expectation_samples self.include_previous_query =", "observed thus far. 
:param N: number of w_samples to draw.", "( np.sum(np.dot(features, w_samples.T), axis=1) / w_samples.shape[0] ) # query_option_count x", "= [ self.domain.run(c) for c in query_options_controls ] raw_phis =", "\"QueryGenerator.__init__: num_expectation_samples must be \\ at least 1\" self.domain =", "sum across different terms in PL-update -tt.log( [ tt.sum( #", "continue\") az.plot_posterior(trace) plt.show() input(\"Press enter to continue\") all_samples = trace.sel(", "self.domain.control_bounds ] * self.trajectory_length opt_res = opt.fmin_l_bfgs_b( func, x0=np.random.uniform( low=self.num_new_queries", "discarded :return: list of w_samples drawn \"\"\" \"\"\"Define model for", "self.update_func == \"rank\" ), (\"Update\" \" function must be one", "i in range(len(rank)): exp_rewards_sorted[rank[i]] = exp_rewards[i] value, i = 1,", "== \"approx\": result = phi[rank] - phi[1 - rank] elif", "state: raw_trajectories = [ self.domain.run(c) for c in query_options_controls ]", "= [ controls[i * z : (i + 1) *", "from original DemPref agent. \"\"\" def __init__( self, dom: Environment,", "float: \"\"\" Maximize the volume removal objective. :param features: a", "model's learned weights. ::inputs: ::current_weights: Irrelevant for DemPref; useful to", "index. e.g. if creation_index = 0, use the dempref dempref_agent.csv", "query into the Sampler. :param phi: a dictionary mapping rankings", "= False, ) -> List[Trajectory]: \"\"\" Generate self.num_queries number of", "that (locally) maximize the maximum volume removal objective. :param w_samples:", "float: \"\"\" The ranking maximum volume removal objective function. Note:", "second is the samples that will be used to approximate", "beginning # this query session; domain.run(c) will thus reset to", "False, ) -> list: \"\"\"Generate query using approximate gradients. 
Code", "unit ball.\"\"\" w_sum = pm.math.sqr(w).sum() result = tt.switch( pm.math.gt(w_sum, 1.0),", "features \"\"\" volumes_removed = [] for i in range(len(features)): feature_diff", "w_samples = np.array([r / np.linalg.norm(r) for r in all_samples]) return", "An agent which uses demonstrations and preferences. Code adapted from", "np.ndarray: \"\"\"Update the model's learned weights. ::inputs: ::current_weights: Irrelevant for", "what the the original paper's codebase designates as their main", "i in range(self.num_new_queries) ] features_each_q_option = np.zeros( (domain.w_dim, self.num_new_queries) )", "def update_function(distribution): result = tt.sum( [ -tt.log( tt.sum( tt.exp( self.beta_pref", "following optimization is w.r.t. volume removal; the domain's # optimization", "/ np.sum( np.exp( self.beta_pref * ( np.array(exp_rewards_sorted[i:]) - exp_rewards_sorted[i] )", "0 print(\"Generating query_options\") while query_diff <= self.epsilon: if self.incl_prev_query: if", "tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i], distribution ) ) ) )", "k in range(len(query_options))} self._sampler.load_prefs(phi, choice_index) self.w_samples = self._sampler.sample(N=self.n_samples_summ) # Return", "== \"rank\" ), (\"Update\" \" function must be one of", "and preferences observed thus far. :param N: number of w_samples", "az from inquire.agents.agent import Agent from inquire.environments.environment import Environment from", "[], w_dim: int = 4, which_param_csv: int = 0, visualize:", "weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T), axis=1) / w_samples.shape[0] ) #", "via L_BFGS. 
:param controls: an array, concatenated to contain the", "] ) + tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ), )", "Exception( update_func + \" is not a valid update function.\"", "range(len(rank)): exp_rewards_sorted[rank[i]] = exp_rewards[i] value, i = 1, 0 for", "self.incl_prev_query = self._dempref_agent_parameters[ \"incl_prev_query\" ][0] self.gen_scenario = self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters =", "-- summed across w_samples v_removed = 1.0 - np.minimum( 1.0,", "1.0 - np.minimum( 1.0, np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return", "across w_samples volumes_removed = [] rankings = itertools.permutations( list(range(self.num_queries)) )", "approximate the objective :return: the value of the objective function,", "0 ] self.trim_start = self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count = self._dempref_agent_parameters[ \"query_option_count\" ][0]", "tune=5000, return_inferencedata=True, init=\"adapt_diag\", ) # except: except ( pm.SamplingError, pm.parallel_sampling.ParallelSamplingError,", "for f in all_csvs]) sorted_csvs = sorted(all_csvs, key=os.path.getmtime) sorted_csvs =", "Energy): while True: trace = self.get_trace(test_value) if trace is not", "sample(self, N: int, T: int = 1, burn: int =", "on the demonstrations and preferences observed thus far. :param N:", "the demonstrations and preferences observed thus far. :param N: number", ":param N: number of w_samples to draw. 
:param T: if", "chosen_csv = sorted_csvs[-which_csv] df = pd.read_csv(chosen_csv) params_dict = df.to_dict() return", "for i in range(self.num_new_queries) ] end = time.perf_counter() print(f\"Finished computing", "/ Path(\"../inquire/agents/\") # Sort the .csvs in descending order by", "( num_queries >= 1 ), \"QueryGenerator.__init__: num_queries must be at", ":param phi: a dictionary mapping rankings (0,...,query_option_count-1) to feature vectors", "return result try: # Potential is a \"potential term\" defined", "the feature values of each query option :param w_samples: w_samples", "= sorted_csvs[-which_csv] df = pd.read_csv(chosen_csv) params_dict = df.to_dict() return params_dict", "the demonstrations into the Sampler. :param demos: a Numpy array", "self.teacher_type = self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos = self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos = self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count", ".csv. ::inputs: :creation_index: A time-descending .csv file index. e.g. if", "for i in range(len(features)): feature_diff = ( features[i] - features[1", ") -> list: \"\"\"Generate query using approximate gradients. Code adapted", "None, blank_traj: bool = False, ) -> List[Trajectory]: \"\"\" Generate", "-- summed across w_samples volumes_removed = [] rankings = itertools.permutations(", "queries. Code adapted from original DemPref agent. 
\"\"\" def __init__(", "exp_rewards = ( np.sum(np.dot(features, w_samples.T), axis=1) / w_samples.shape[0] ) #", "as tt class DemPref(Agent): \"\"\"A preference-querying agent seeded with demonstrations.", "domain.run(c) will thus reset to appropriate # state: raw_trajectories =", "enter to continue\") az.plot_energy(trace) plt.show() input(\"Press enter to continue\") az.plot_posterior(trace)", "norm = (test_value ** 2).sum() if norm <= 1: break", "= np.array(list(Path.iterdir(data_path))) all_csvs = all_files[ np.argwhere([f.suffix == \".csv\" for f", "if self._query_generator is None: self._query_generator = self.DemPrefQueryGenerator( dom=domain, num_queries=self.query_option_count, trajectory_length=self.trajectory_length,", "approximate the objective function :return: the value of the objective", "dempref_agent.csv most recently created. \"\"\" data_path = Path.cwd() / Path(\"../inquire/agents/\")", "plt.show() input(\"Press enter to continue\") az.plot_posterior(trace) plt.show() input(\"Press enter to", "\"query_option_count\" ][0] self.update_func = self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length = self._dempref_agent_parameters[ \"trajectory_length\" ][0]", "trace = self.get_trace(test_value) if trace is not None: break if", "and self.teacher_type == \"term\": assert ( self.n_demos > 0 ),", "before beginning # this query session; domain.run(c) will thus reset", "= self._dempref_agent_parameters[ \"incl_prev_query\" ][0] self.gen_scenario = self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters = self._dempref_agent_parameters[\"n_pref_iters\"][0]", "using approximate gradients. Code adapted from DemPref's ApproxQueryGenerator. \"\"\" if", "query. Only required if self.incl_prev_query is True :param blank_traj: True", "which uses demonstrations and preferences. Code adapted from Learning Reward", "the sampler. :param query_option_count: Number of queries. 
:param dim_features: Dimension", "def clear_pref(self): \"\"\"Clear all preference information from the sampler.\"\"\" self.phi_prefs", "demonstrations into the Sampler. :param demos: a Numpy array containing", "\"rank\" ), (\"Update\" \" function must be one of the", "trace class DemPrefQueryGenerator: \"\"\"Generate queries. Code adapted from original DemPref", "via PyMC3 version 3.5; this codebase adapts their model to", "size=self.dim_features ) test_value = test_value / np.linalg.norm(test_value) norm = (test_value", "t in raw_trajectories ] query_options_trajectories = [ Trajectory(raw_trajectories[i], raw_phis[i]) for", "[ [self.q_session_index, 0, \"mean\", mean_w], [self.q_session_index, 0, \"var\", var_w], ]", "np.random.uniform( low=-1, high=1, size=self.dim_features ) test_value = test_value / np.linalg.norm(test_value)", "in range(len(features)): feature_diff = np.array( [f - features[i] for f", "beta_demo=self.beta_demo, beta_pref=self.beta_pref, visualize=self._visualize, ) self.w_samples = self._sampler.sample(N=self.n_samples_summ) \"\"\"If we want", "only the most recent \"\"\" if feedback == []: #", "# query_option_count x 1 -- summed across w_samples v_removed =", "iterating over all possible rankings for rank in rankings: exp_rewards_sorted", ") return result self.update_function = update_function while True: test_value =", "difference is a trait of the DemPref codebase. z =", "when using approximate gradients\" self.update_func = update_func self.beta_pref = beta_pref", "\"pref_iter\", \"type\", \"value\"]) def initialize_weights(self, domain: Environment) -> np.ndarray: \"\"\"Randomly", "from orginal codebase's 'runner.py' object. 
Note that some variable names", "sample are discarded :param burn: how many samples before the", "self._query_generator = self.DemPrefQueryGenerator( dom=domain, num_queries=self.query_option_count, trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario, update_func=self.update_func,", "# sum down different feature-differences in a single term in", "vectors from demonstrated trajectories self.phi_demos = np.zeros((1, self.dim_features)) # a", "Only required if self.incl_prev_query is True :param blank_traj: True is", "T^{th} sample are discarded :param burn: how many samples before", "sorted_csvs[-which_csv] df = pd.read_csv(chosen_csv) params_dict = df.to_dict() return params_dict def", "at least 1\" self.domain = dom self.num_queries = num_queries self.trajectory_length", "all_samples.reshape( all_samples.shape[0] * all_samples.shape[1], -1 ) w_samples = np.array([r /", "] - self.phi_prefs[i][j], distribution, ) ) ) for j in", "[ opt_res[0][i * z : (i + 1) * z]", "pm.math.sqr(w).sum() result = tt.switch( pm.math.gt(w_sum, 1.0), -100, # -np.inf, self.update_function(w),", "-np.inf, self.update_function(w), ) return result try: # Potential is a", "rank): \"\"\" Load the results of a preference query into", "boolean for whether one of the queries is the previously", "def load_demo(self, phi_demos: np.ndarray): \"\"\" Load the demonstrations into the", "= weight_sample_count self._trajectory_sample_count = trajectory_sample_count self._trajectory_length = trajectory_length self._interaction_types =", "sampling algorithm (an extension of Hamilitonian Monte Carlo MCMC): https://arxiv.org/abs/1111.4246.", "= 0 self._w_dim = w_dim assert ( self.update_func == \"pick_best\"", "given queries' features \"\"\" volumes_removed = [] for i in", "all_files = np.array(list(Path.iterdir(data_path))) all_csvs = all_files[ np.argwhere([f.suffix == \".csv\" for", "the 
volume removal objective. :param features: a list containing the", "raw_trajectories ] query_options_trajectories = [ Trajectory(raw_trajectories[i], raw_phis[i]) for i in", "[ -tt.log( tt.sum( tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i], distribution )", "codebase creates a sampler via PyMC3 version 3.5; this codebase", "Integrating Human Demonstrations and Preferences. \"\"\" import itertools import os", "tt.dot(self.phi_demos, distribution) ) return result elif self.update_func == \"pick_best\": def", "while True: test_value = np.random.uniform( low=-1, high=1, size=self.dim_features ) test_value", "of the objective function, evaluated on the given queries' features", "features] ) # query_option_count x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff,", "= ( tt.sum( # sum across different queries [ tt.sum(", "self.n_demos = self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos = self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count = self._dempref_agent_parameters[\"opt_iter_count\"][ 0", "in range(self.num_new_queries) ] end = time.perf_counter() print(f\"Finished computing queries in", "NOTE the DemPref codebase creates a sampler via PyMC3 version", "List, w_samples: np.ndarray) -> float: \"\"\" Maximize the volume removal", "int = 4, which_param_csv: int = 0, visualize: bool =", "the scenario -- i.e., other agents' behavior :update_func: the update_func", "dim_features: Dimension of feature vectors. :param update_func: options are \"rank\",", "= 1, 0 for i in range(len(rank) - 1): value", "trajectory_sample_count self._trajectory_length = trajectory_length self._interaction_types = interaction_types self._visualize = visualize", "result, tmp = [phi[rank] - phi[rank]], [] for key in", "descent.\"\"\" self.reset() return self.w_samples def reset(self) -> None: \"\"\"Prepare for", "args[0] w_samples = args[1] controls = np.array(controls) controls_set = [", "the model logp\"(PyMC3 developer # guide). 
In this instance, the", "feedback[-1].choice.selection choice_index = query_options.index(choice) if self.incl_prev_query: self.all_query_choices[self.random_scenario_index] = choice #", "i in range(len(features)): feature_diff = np.array( [f - features[i] for", "time-descending .csv file index. e.g. if creation_index = 0, use", "over all possible rankings for rank in rankings: exp_rewards_sorted =", "single term in PL-update tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i][ j:,", "T: if greater than 1, all samples except each T^{th}", "self.update_function = update_function while True: test_value = np.random.uniform( low=-1, high=1,", "if features.shape[0] > features.shape[1]: features = features.T volumes_removed = []", "values of each query :param w_samples: samples of w, used", "== \"rank\": result = [None] * len(rank) for i in", "selected query :generate_scenario: boolean for whether we want to generate", "samples except each T^{th} sample are discarded :param burn: how", "trajectory_length >= 1 ), \"QueryGenerator.__init__: trajectory_length must be at least", "Add random-variable x to model: rv_x = pm.Uniform( name=\"rv_x\", shape=self.dim_features,", "list of np.arrays containing feature difference vectors and # which", "for x in self.demos ] self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos = self.demos if", "demonstrations. Note: We instantiate the agent according to arguments corresponding", "i in range(self.num_new_queries) ] end = time.perf_counter() print(f\"Finished computing queries", "self.w_samples else: # Use the most recent Choice in feedback:", "= [None] * len(rank) for i in range(len(rank)): exp_rewards_sorted[rank[i]] =", "the domain's # optimization is w.r.t. the linear combination of", "converges; these initial samples are discarded :return: list of w_samples", "load_demo(self, phi_demos: np.ndarray): \"\"\" Load the demonstrations into the Sampler.", "DemPref agent. 
\"\"\" def __init__( self, dom: Environment, num_queries: int,", "function if query_option_count > 2\" elif not ( self.update_func ==", "domain, and the second is the samples that will be", "= self._dempref_agent_parameters[\"opt_iter_count\"][ 0 ] self.trim_start = self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count = self._dempref_agent_parameters[", "\"potential term\" defined as an \"additional # tensor...to be added", "parameters \"\"\" self._dempref_agent_parameters = self.read_param_csv(which_param_csv) \"\"\" Instance attributes from orginal", "), \"QueryGenerator.__init__: num_expectation_samples must be \\ at least 1\" self.domain", "using approx gradients. ::original inputs: :dom: the domain to generate", "] * self.trajectory_length upper_input_bound = [ x[1] for x in", "at each time step :trajectory_length: the length of each query", "== \"pick_best\": def update_function(distribution): result = tt.sum( [ -tt.log( tt.sum(", "np.mean(self.w_samples, axis=0) mean_w = mean_w / np.linalg.norm(mean_w) return np.array(mean_w, copy=True).reshape(1,", "pm.sample( 10000, tune=5000, return_inferencedata=True, init=\"adapt_diag\", ) # except: except (", "queries' features \"\"\" if features.shape[0] > features.shape[1]: features = features.T", "if creation_index = 0, use the dempref dempref_agent.csv most recently", "w_sum = pm.math.sqr(w).sum() result = tt.switch( pm.math.gt(w_sum, 1.0), -100, #", "effectively # the model's log-likelihood. 
p = pm.Potential(\"sphere\", sphere(rv_x)) trace", "approx_objective( features: np.ndarray, w_samples: np.ndarray ) -> float: \"\"\" Approximate", "trajectories=query_options, ) return query def update_weights( self, current_weights: np.ndarray, domain:", "continue\") all_samples = trace.sel( draw=slice(burn, None) ).posterior.rv_x.values all_samples = all_samples.reshape(", "session; domain.run(c) will thus reset to appropriate # state: raw_trajectories", "c, controls_as_input=True ) if self.include_previous_query and not blank_traj: features_each_q_option =", "[ self.domain.run(c) for c in query_options_controls ] raw_phis = [", "= num_expectation_samples self.include_previous_query = include_previous_query self.generate_scenario = ( generate_scenario #", ") # feature vectors from demonstrated trajectories self.phi_demos = np.zeros((1,", "beta_demo self.beta_pref = beta_pref self._visualize = visualize if self.update_func ==", "is not a valid update function.\" ) # feature vectors", "rankings for rank in rankings: exp_rewards_sorted = [None] * len(rank)", "pre-defined agent parameters \"\"\" self._dempref_agent_parameters = self.read_param_csv(which_param_csv) \"\"\" Instance attributes", "import os import time from pathlib import Path from typing", "def process_demonstrations( self, trajectories: list, domain: Environment ) -> None:", "features = [ domain.features_from_trajectory(x.trajectory) for x in query_options ] phi", "= 0 self.query_index = 0 self.config = [ self.teacher_type, self.n_demos,", "a preference query into the Sampler. 
:param phi: a dictionary", "n in range(m): query_diffs.append( np.linalg.norm( domain.features_from_trajectory( query_options[m].trajectory ) - domain.features_from_trajectory(", "trace (and avoid Bad Initial Energy): while True: trace =", "\"\"\"Generate demonstrations to seed the querying process.\"\"\" self.demos = trajectories", "domain: Environment, query_state: int, curr_w: np.ndarray, verbose: bool = False,", "pd.DataFrame(columns=[\"run #\", \"pref_iter\", \"type\", \"value\"]) def initialize_weights(self, domain: Environment) ->", "to PyMC3 version 3.11.2. We use the NUTS sampling algorithm", "raw_phis = [ self.domain.features_from_trajectory(t) for t in raw_trajectories ] query_options_trajectories", "DEMPREF). :param features: a list containing the feature values of", "\"rank\": result = [None] * len(rank) for i in range(len(rank)):", ":param T: if greater than 1, all samples except each", "update_func=self.update_func, beta_pref=self.beta_pref, ) if self.incl_prev_query: if len(self.demos) > 0: self.random_scenario_index", "pymc3.distributions.transforms as tr import scipy.optimize as opt import theano.tensor as", "choice # Create dictionary map from rankings to query-option features;", "AND NO DEMPREF). 
:param features: a list containing the feature", "w_samples) def objective(features: List, w_samples: np.ndarray) -> float: \"\"\" Maximize", "high=self.num_new_queries * upper_input_bound, size=(self.num_new_queries * z), ), args=(self.domain, w_samples), bounds=self.domain.control_bounds", "self.n_samples_summ, self.n_samples_exp, self.beta_demo, self.beta_pref, self.beta_teacher, ] self.df = pd.DataFrame(columns=[\"run #\",", "query_diff <= self.epsilon: if self.incl_prev_query: if last_query_choice.null: query_options = self._query_generator.generate_query_options(", "the samples: mean_w = np.mean(self.w_samples, axis=0) mean_w = mean_w /", "on :num_queries: number of queries to generate at each time", "w_samples.T), axis=1) / w_samples.shape[0] ) # query_option_count x 1 --", "upper=1, testval=test_val, ) # Define the prior as the unit", "\"\"\" Load the results of a preference query into the", "self.update_func == \"pick_best\": result, tmp = [phi[rank] - phi[rank]], []", "w_samples) elif self.update_func == \"approx\": return -approx_objective(features_each_q_option, w_samples) else: return", "the second is the samples that will be used to", "some variable names are modified to be consist with the", "phi_demos: np.ndarray): \"\"\" Load the demonstrations into the Sampler. :param", "Get the pre-defined agent parameters \"\"\" self._dempref_agent_parameters = self.read_param_csv(which_param_csv) \"\"\"", "sphere(w): \"\"\"Determine if w is part of the unit ball.\"\"\"", "None return trace class DemPrefQueryGenerator: \"\"\"Generate queries. Code adapted from", "the Plackett-Luce model of teacher behavior. CANNOT BE USED WITH", "designates as their main experiment. 
\"\"\" def __init__( self, weight_sample_count:", "for i in range(len(self.phi_prefs)) ] ) + tt.sum( self.beta_demo *", ": ] - self.phi_prefs[i][j], distribution, ) ) ) for j", "None self._query_generator = None self._first_q_session = True self._q_session_index = 0", "produces query options that (locally) maximize the maximum volume removal", "defined within the proceeding # context: model = pm.Model() with", "shape=self.dim_features, lower=-1, upper=1, testval=test_val, ) # Define the prior as", "for key in sorted(phi.keys()): if key != rank: tmp.append(phi[key] -", "volumes_removed = [] for i in range(len(features)): feature_diff = (", "of np.arrays containing feature difference vectors and # which encode", "be at least 1\" assert ( num_expectation_samples >= 1 ),", "the given queries' features \"\"\" if features.shape[0] > features.shape[1]: features", "volumes_removed.append(v_removed) return np.min(volumes_removed) def approx_objective( features: np.ndarray, w_samples: np.ndarray )", "var_w], ] self.df = self.df.append( pd.DataFrame( data, columns=[\"run #\", \"pref_iter\",", "scenario -- i.e., other agents' behavior :update_func: the update_func used;", "in DemPref:\"\"\" self.first_q_session = True self.q_session_index = 0 self.query_index =", "= self.demos if self.incl_prev_query: self.all_query_choices = [d for d in", "self.include_previous_query and not blank_traj: return [last_query_choice] + query_options_trajectories else: return", "rank in rankings: exp_rewards_sorted = [None] * len(rank) for i", "they did in DemPref:\"\"\" mean_w = np.mean(self.w_samples, axis=0) mean_w =", "func, x0=np.random.uniform( low=self.num_new_queries * lower_input_bound, high=self.num_new_queries * upper_input_bound, size=(self.num_new_queries *", "initialize weights for gradient descent.\"\"\" self.reset() return self.w_samples def reset(self)", "np.min(volumes_removed) def approx_objective( features: np.ndarray, w_samples: np.ndarray ) -> float:", "\"\"\"Generate query 
using approximate gradients. Code adapted from DemPref's ApproxQueryGenerator.", "visualize: bool = False, ): \"\"\" Initialize the sampler. :param", "Agent from inquire.environments.environment import Environment from inquire.interactions.feedback import Query, Trajectory", "c in enumerate(controls_set): features_each_q_option[ :, i ] = domain.features_from_trajectory( c,", "= np.array(controls) controls_set = [ controls[i * z : (i", "self.num_new_queries * self.trajectory_length, approx_grad=True, ) query_options_controls = [ opt_res[0][i *", "np.array(mean_w, copy=True).reshape(1, -1) def read_param_csv(self, which_csv: int = 0) ->", "return result elif self.update_func == \"pick_best\": def update_function(distribution): result =", "to be consist with the Inquire parlance. \"\"\" self.domain_name =", "self._query_generator is None: self._query_generator = self.DemPrefQueryGenerator( dom=domain, num_queries=self.query_option_count, trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp,", "to model: rv_x = pm.Uniform( name=\"rv_x\", shape=self.dim_features, lower=-1, upper=1, testval=test_val,", "query_state: int, curr_w: np.ndarray, verbose: bool = False, ) ->", "= None, blank_traj: bool = False, ) -> List[Trajectory]: \"\"\"", "= [] for m in range(len(query_options)): for n in range(m):", "bool = False, ) -> List[Trajectory]: \"\"\" Generate self.num_queries number", "result elif self.update_func == \"pick_best\": def update_function(distribution): result = tt.sum(", "int = 0, visualize: bool = False, ): \"\"\"Initialize the", "self.update_func == \"approx\": result = phi[rank] - phi[1 - rank]", "from inquire.interactions.modalities import Preference import matplotlib.pyplot as plt import numpy", "parameter measuring irrationality of teacher in selecting preferences \"\"\" self.query_option_count", "consist with the Inquire parlance. 
\"\"\" self.domain_name = self._dempref_agent_parameters[\"domain\"][0] self.teacher_type", "<= 1: break # Get a sampling trace (and avoid", "low=self.num_new_queries * lower_input_bound, high=self.num_new_queries * upper_input_bound, size=(self.num_new_queries * z), ),", "querying. Code adapted from original DemPref agent. \"\"\" def __init__(", "trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario, update_func=self.update_func, beta_pref=self.beta_pref, ) if self.incl_prev_query: if", "inquire/tests/evaluation.py and the respective domain. \"\"\" self._weight_sample_count = weight_sample_count self._trajectory_sample_count", "indicated .csv and convert it to a dictionary: chosen_csv =", "np.array( [f - features[i] for f in features] ) #", "update_func used; the options are \"pick_best\", \"approx\", and \"rank\" :beta_pref:", "def func(controls: np.ndarray, *args) -> float: \"\"\"Minimize via L_BFGS. 
:param", "): raise Exception( update_func + \" is not a valid", "DemPref; useful to other agents ::domain: The task's environment ::feedback:", "* tt.dot( self.phi_prefs[i][ j:, : ] - self.phi_prefs[i][j], distribution, )", "np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def approx_objective( features:", "self.n_samples_exp = self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo = self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref = self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher", "return -rank_objective(features_each_q_option, w_samples) def objective(features: List, w_samples: np.ndarray) -> float:", "i in range(len(features)): feature_diff = ( features[i] - features[1 -", "0 ), \"Cannot include previous query if no demonstration is", "creates a sampler via PyMC3 version 3.5; this codebase adapts", "w_samples = args[1] controls = np.array(controls) controls_set = [ controls[i", "/ w_samples.shape[0] ) # query_option_count x 1 -- summed across", "else: query_options = self._query_generator.generate_query_options( self.w_samples ) query_diffs = [] for", "w_samples) -> float: \"\"\" The ranking maximum volume removal objective", "objective function, evaluated on the given queries' features \"\"\" #", "dimension n_dem -by- self.dim_features \"\"\" self.phi_demos = phi_demos def load_prefs(self,", "whether one of the queries is the previously selected query", "= dim_features self.update_func = update_func self.beta_demo = beta_demo self.beta_pref =", "), \"Cannot include previous query if no demonstration is provided\"", "Potential is a \"potential term\" defined as an \"additional #", "len(self.demos) > 0: self.random_scenario_index = np.random.randint(len(self.demos)) else: self.random_scenario_index = 0", "Define the prior as the unit ball centered at 0:", "self.incl_prev_query is True :param blank_traj: True is last_query_choice is blank.", 
"self.beta_pref = self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher = self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If we want to", "lower_input_bound = [ x[0] for x in self.domain.control_bounds ] *", "whether we want to generate the scenario -- i.e., other", "range(len(self.phi_prefs)) ] ) + tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) )", "for x in self.domain.control_bounds ] * self.trajectory_length upper_input_bound = [", "- self.phi_prefs[i][j], distribution, ) ) ) for j in range(", "to properly index data: if self.first_q_session: self.first_q_session = False else:", "a list of np.arrays containing feature difference vectors and #", "lower=-1, upper=1, testval=test_val, ) # Define the prior as the", "all_samples = trace.sel( draw=slice(burn, None) ).posterior.rv_x.values all_samples = all_samples.reshape( all_samples.shape[0]", "i ] = domain.features_from_trajectory( c, controls_as_input=True ) if self.include_previous_query and", "domain to generate queries on :num_queries: number of queries to", "result.extend(tmp) self.phi_prefs.append(np.array(result)) def clear_pref(self): \"\"\"Clear all preference information from the", "int, trajectory_length: int, num_expectation_samples: int, include_previous_query: bool, generate_scenario: bool, update_func:", "be False ) assert ( self.generate_scenario is False ), \"Cannot", "in all_csvs]) sorted_csvs = sorted(all_csvs, key=os.path.getmtime) sorted_csvs = [Path(c) for", "in query_options_controls ] raw_phis = [ self.domain.features_from_trajectory(t) for t in", "= all_samples.reshape( all_samples.shape[0] * all_samples.shape[1], -1 ) w_samples = np.array([r", "self.demos ] self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos = self.demos if self.incl_prev_query: self.all_query_choices =", "self.trajectory_length, self.incl_prev_query, self.gen_scenario, self.n_pref_iters, self.epsilon, self.n_samples_summ, self.n_samples_exp, self.beta_demo, 
self.beta_pref, self.beta_teacher,", "update function if query_option_count > 2\" elif not ( self.update_func", "if self.include_previous_query and not blank_traj: return [last_query_choice] + query_options_trajectories else:", "i in range(len(raw_trajectories)) ] if self.include_previous_query and not blank_traj: return", "axis=1, ) if self.update_func == \"pick_best\": return -objective(features_each_q_option, w_samples) elif", "= np.mean(self.w_samples, axis=0) mean_w = mean_w / np.linalg.norm(mean_w) var_w =", "for t in raw_trajectories ] query_options_trajectories = [ Trajectory(raw_trajectories[i], raw_phis[i])", "int = 1000) -> np.ndarray: \"\"\"Return N samples from the", "self._sampler.clear_pref() self._sampler = self.DemPrefSampler( query_option_count=self.query_option_count, dim_features=self._w_dim, update_func=self.update_func, beta_demo=self.beta_demo, beta_pref=self.beta_pref, visualize=self._visualize,", "* ( np.array(exp_rewards_sorted[i:]) - exp_rewards_sorted[i] ) ) ) volumes_removed.append(1 -", "query session.\"\"\" if self._sampler is not None: self._sampler.clear_pref() self._sampler =", "= [], w_dim: int = 4, which_param_csv: int = 0,", "- value) return np.min(volumes_removed) # The following optimization is w.r.t.", "preference information from the sampler.\"\"\" self.phi_prefs = [] def sample(self,", "list ) -> np.ndarray: \"\"\"Update the model's learned weights. 
::inputs:", "phi[rank] - phi[1 - rank] elif self.update_func == \"pick_best\": result,", "update_func: str = \"approx\", beta_demo: float = 0.1, beta_pref: float", "thus reset to appropriate # state: raw_trajectories = [ self.domain.run(c)", "feature values of each query :param w_samples: samples of w,", "each query :num_expectation_samples: number of w_samples to use in approximating", "This function produces query options that (locally) maximize the maximum", "] features_each_q_option = np.zeros( (domain.w_dim, self.num_new_queries) ) for i, c", "self.trim_start, self.query_option_count, self.update_func, self.trajectory_length, self.incl_prev_query, self.gen_scenario, self.n_pref_iters, self.epsilon, self.n_samples_summ, self.n_samples_exp,", "axis=1) / w_samples.shape[0] ) # query_option_count x 1 -- summed", "time of creation: all_files = np.array(list(Path.iterdir(data_path))) all_csvs = all_files[ np.argwhere([f.suffix", "1, burn: int = 1000) -> np.ndarray: \"\"\"Return N samples", "class DemPrefSampler: \"\"\"Sample trajectories for querying. Code adapted from original", "this point. DemPref utilizes only the most recent \"\"\" if", "self.beta_demo = self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref = self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher = self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If", "self._sampler = self.DemPrefSampler( query_option_count=self.query_option_count, dim_features=self._w_dim, update_func=self.update_func, beta_demo=self.beta_demo, beta_pref=self.beta_pref, visualize=self._visualize, )", ") -> float: \"\"\" Approximate the maximum volume removal objective.", "= 0 print(\"Generating query_options\") while query_diff <= self.epsilon: if self.incl_prev_query:", "we want to save data as they did in DemPref:\"\"\"", "and query generator: \"\"\" self._sampler = None self._w_samples = None", "is a \"potential term\" defined as an \"additional # tensor...to", "learned weights. 
::inputs: ::current_weights: Irrelevant for DemPref; useful to other", "= [d for d in self.cleaned_demos] class DemPrefSampler: \"\"\"Sample trajectories", "object. Note that some variable names are modified to be", "argument is the domain, and the second is the samples", "for gradient descent.\"\"\" self.reset() return self.w_samples def reset(self) -> None:", "if last_query_choice.null: query_options = self._query_generator.generate_query_options( self.w_samples, blank_traj=True ) else: query_options", "and # features; this difference is a trait of the", "query :num_expectation_samples: number of w_samples to use in approximating the", "samples that will be used to approximate the objective function", "a sampler via PyMC3 version 3.5; this codebase adapts their", "in range( self.query_option_count ) ] ) ) for i in", "bool = False, ) -> list: \"\"\"Generate query using approximate", "except each T^{th} sample are discarded :param burn: how many", "approximating the objective function :include_previous_query: boolean for whether one of", "self.update_func == \"pick_best\": def update_function(distribution): result = tt.sum( [ -tt.log(", "of queries. 
This function produces query options that (locally) maximize", "np.sum( np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def approx_objective(", "end = time.perf_counter() print(f\"Finished computing queries in {end - start}s\")", "1) * z] for i in range(self.num_new_queries) ] end =", "in range(len(query_options)): for n in range(m): query_diffs.append( np.linalg.norm( domain.features_from_trajectory( query_options[m].trajectory", "the objects defined within the proceeding # context: model =", "self.update_func == \"pick_best\": return -objective(features_each_q_option, w_samples) elif self.update_func == \"approx\":", "must be \\ at least 1\" self.domain = dom self.num_queries", "last_query_choice = self.all_query_choices[ self.random_scenario_index ] # Generate query_options while ensuring", ":param beta_pref: parameter measuring irrationality of teacher in selecting preferences", "self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters = self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon = self._dempref_agent_parameters[\"epsilon\"][0] \"\"\" Instantiate the", "4, which_param_csv: int = 0, visualize: bool = False, ):", "for n in range(m): query_diffs.append( np.linalg.norm( domain.features_from_trajectory( query_options[m].trajectory ) -", "beta_pref: float, ) -> None: \"\"\" Initialize the approx query", "self.beta_pref * tt.dot( self.phi_prefs[i], distribution ) ) ) ) for", "attributes from orginal codebase's 'runner.py' object. Note that some variable", "respective domain. \"\"\" self._weight_sample_count = weight_sample_count self._trajectory_sample_count = trajectory_sample_count self._trajectory_length", "all preference information from the sampler.\"\"\" self.phi_prefs = [] def", "original DemPref agent. 
\"\"\" def __init__( self, query_option_count: int, dim_features:", "trace = pm.sample( 10000, tune=5000, return_inferencedata=True, init=\"adapt_diag\", ) # except:", "v_removed = 1.0 - 1.0 / np.sum( np.exp(self.beta_pref * weighted_feature_diff)", "\"\"\" def __init__( self, dom: Environment, num_queries: int, trajectory_length: int,", "\"pick_best\": return -objective(features_each_q_option, w_samples) elif self.update_func == \"approx\": return -approx_objective(features_each_q_option,", "features[i] for f in features] ) # query_option_count x feature_size", "* tt.dot(self.phi_demos, distribution) ) return result elif self.update_func == \"rank\":", "selected query. Only required if self.incl_prev_query is True :param blank_traj:", "start_state=query_state, trajectories=query_options, ) return query def update_weights( self, current_weights: np.ndarray,", "self.num_new_queries = ( self.num_queries - 1 if self.include_previous_query else self.num_queries", "elif self.update_func == \"pick_best\": result, tmp = [phi[rank] - phi[rank]],", "ranking from the preference # queries self.phi_prefs = [] def", "must be 2; will throw an assertion error otherwise :param", "ensuring that features of query_options # are epsilon apart: query_diff", "is a trait of the DemPref codebase. z = self.trajectory_length", "name=\"rv_x\", shape=self.dim_features, lower=-1, upper=1, testval=test_val, ) # Define the prior", "query_option_count: Number of queries. :param dim_features: Dimension of feature vectors.", "None: break if self._visualize: az.plot_trace(trace) plt.show() input(\"Press enter to continue\")", "* self.trajectory_length upper_input_bound = [ x[1] for x in self.domain.control_bounds", "potential is effectively # the model's log-likelihood. 
p = pm.Potential(\"sphere\",", "feedback == []: # No feedback yet received return self.w_samples", "choice_index = query_options.index(choice) if self.incl_prev_query: self.all_query_choices[self.random_scenario_index] = choice # Create", "summed across w_samples v_removed = 1.0 - 1.0 / np.sum(", "feature_size # w_samples: n_samples x feature_size exp_rewards = ( np.sum(np.dot(features,", "did in DemPref:\"\"\" self.first_q_session = True self.q_session_index = 0 self.query_index", "# features; this difference is a trait of the DemPref", "2; will throw an assertion error otherwise :param beta_demo: parameter", "self.generate_scenario = ( generate_scenario # Currently must be False )", "with the Inquire parlance. \"\"\" self.domain_name = self._dempref_agent_parameters[\"domain\"][0] self.teacher_type =", "w_samples: n_samples x feature_size exp_rewards = ( np.sum(np.dot(features, w_samples.T), axis=1)", "# optimization is w.r.t. the linear combination of weights and", "self.w_samples = self._sampler.sample(N=self.n_samples_summ) # Return the new weights from the", "utilizes only the most recent \"\"\" if feedback == []:", "step :trajectory_length: the length of each query :num_expectation_samples: number of", "blank_traj=True ) else: query_options = self._query_generator.generate_query_options( self.w_samples, last_query_choice ) else:", "= pd.read_csv(chosen_csv) params_dict = df.to_dict() return params_dict def process_demonstrations( self,", "self.phi_prefs[i], distribution ) ) ) ) for i in range(len(self.phi_prefs))", "for i in range(len(raw_trajectories)) ] if self.include_previous_query and not blank_traj:", "= self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref = self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher = self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If we", "float = 0.1, beta_pref: float = 1.0, visualize: bool =", "trace.sel( draw=slice(burn, None) ).posterior.rv_x.values all_samples = all_samples.reshape( 
all_samples.shape[0] * all_samples.shape[1],", "-> float: \"\"\" Approximate the maximum volume removal objective. :param", "] end = time.perf_counter() print(f\"Finished computing queries in {end -", "generate scenario when using approximate gradients\" self.update_func = update_func self.beta_pref", "[] rankings = itertools.permutations( list(range(self.num_queries)) ) # iterating over all", "= 0 last_query_choice = self.all_query_choices[ self.random_scenario_index ] # Generate query_options", "def read_param_csv(self, which_csv: int = 0) -> dict: \"\"\"Read an", "function, evaluated on the given queries' features \"\"\" if features.shape[0]", "range(m): query_diffs.append( np.linalg.norm( domain.features_from_trajectory( query_options[m].trajectory ) - domain.features_from_trajectory( query_options[n].trajectory )", "np.sum(np.dot(features, w_samples.T), axis=1) / w_samples.shape[0] ) # query_option_count x 1", "np.minimum( 1.0, np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def", "of a preference query into the Sampler. :param phi: a", "else: # Use the most recent Choice in feedback: query_options", "incl_prev_) :return: a list of trajectories (queries) \"\"\" start =", "\"QueryGenerator.__init__: num_queries must be at least 1\" assert ( trajectory_length", "pd.DataFrame( data, columns=[\"run #\", \"pref_iter\", \"type\", \"value\"] ), ignore_index=True, )", ") assert ( self.generate_scenario is False ), \"Cannot generate scenario", "query_options[m].trajectory ) - domain.features_from_trajectory( query_options[n].trajectory ) ) ) query_diff =", "plt.show() input(\"Press enter to continue\") az.plot_energy(trace) plt.show() input(\"Press enter to", "\"approx\", query_option_count must be 2; will throw an assertion error", "approximate gradients. Code adapted from DemPref's ApproxQueryGenerator. 
\"\"\" if self._query_generator", "include previous query if no demonstration is provided\" self.n_samples_summ =", "query option :param w_samples: w_samples of w used to approximate", "None: \"\"\" Initialize the approx query generation. Note: this class", "be at least 1\" assert ( trajectory_length >= 1 ),", "self.trajectory_length = self._dempref_agent_parameters[ \"trajectory_length\" ][0] self.incl_prev_query = self._dempref_agent_parameters[ \"incl_prev_query\" ][0]", "== \"term\": assert ( self.n_demos > 0 ), \"Cannot include", "1000) -> np.ndarray: \"\"\"Return N samples from the distribution. The", "self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher = self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If we want to save data", "+ 1) * z] for i in range(self.num_new_queries) ] end", "\"\"\" Approximate the maximum volume removal objective. :param features: the", "self._dempref_agent_parameters[\"epsilon\"][0] \"\"\" Instantiate the DemPref-specific sampler and query generator: \"\"\"", "down different feature-differences in a single term in PL-update tt.exp(", "not a valid update function.\" ) # feature vectors from", "the value of the objective function, evaluated on the given", "evaluated on the given queries' features \"\"\" # features: query_option_count", "def __init__( self, query_option_count: int, dim_features: int, update_func: str =", "np.ndarray): \"\"\" Load the demonstrations into the Sampler. 
:param demos:", "[ x[1] for x in self.domain.control_bounds ] * self.trajectory_length opt_res", "interaction_types self._visualize = visualize \"\"\" Get the pre-defined agent parameters", "self.update_func == \"approx\": assert ( self.query_option_count == 2 ), \"Cannot", "# w_samples: n_samples x feature_size exp_rewards = ( np.sum(np.dot(features, w_samples.T),", "discarded :param burn: how many samples before the chain converges;", "- 1 if self.include_previous_query else self.num_queries ) def generate_query_options( self,", "phi[rank]], [] for key in sorted(phi.keys()): if key != rank:", "draw. :param T: if greater than 1, all samples except", "as they did in DemPref:\"\"\" mean_w = np.mean(self.w_samples, axis=0) mean_w", "False else: self.q_session_index += 1 data = [ [self.q_session_index, 0,", "selecting her query ::Inquire-specific inputs: :start_state: The state from which", "self._w_samples = None self._query_generator = None self._first_q_session = True self._q_session_index", "Environment, num_queries: int, trajectory_length: int, num_expectation_samples: int, include_previous_query: bool, generate_scenario:", "c in query_options_controls ] raw_phis = [ self.domain.features_from_trajectory(t) for t", "tr import scipy.optimize as opt import theano.tensor as tt class", "+= 1 data = [ [self.q_session_index, 0, \"mean\", mean_w], [self.q_session_index,", "def get_trace(self, test_val: np.ndarray) -> az.InferenceData: \"\"\"Create an MCMC trace.\"\"\"", "), ignore_index=True, ) def generate_query( self, domain: Environment, query_state: int,", "-> None: \"\"\"Prepare for new query session.\"\"\" if self._sampler is", "the approx query generation. Note: this class generates queries using", "list of w_samples drawn \"\"\" \"\"\"Define model for MCMC. 
NOTE", "phi = {k: features[k] for k in range(len(query_options))} self._sampler.load_prefs(phi, choice_index)", "from the sampler.\"\"\" self.phi_prefs = [] def sample(self, N: int,", "feature vectors for each demonstration; has dimension n_dem -by- self.dim_features", "features \"\"\" if features.shape[0] > features.shape[1]: features = features.T volumes_removed", "from inquire.interactions.feedback import Query, Trajectory from inquire.interactions.modalities import Preference import", "evaluated on the given queries' features \"\"\" volumes_removed = []", "\"Cannot include previous query if no demonstration is provided\" self.n_samples_summ", "import scipy.optimize as opt import theano.tensor as tt class DemPref(Agent):", "sorted_csvs] # Select the indicated .csv and convert it to", "agent according to arguments corresponding to what the the original", "= phi_demos def load_prefs(self, phi: Dict, rank): \"\"\" Load the", "ApproxQueryGenerator. \"\"\" if self._query_generator is None: self._query_generator = self.DemPrefQueryGenerator( dom=domain,", "if self.update_func == \"rank\": result = [None] * len(rank) for", ":param w_samples: w_samples of w used to approximate the objective", "a list of trajectories (queries) \"\"\" start = time.perf_counter() def", "features_each_q_option[ :, i ] = domain.features_from_trajectory( c, controls_as_input=True ) if", "np.sum(np.dot(feature_diff, w_samples.T), axis=1) / w_samples.shape[0] ) # query_option_count x 1", "0 ] self.n_samples_exp = self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo = self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref =", ") ] ) ) for i in range(len(self.phi_prefs)) ] )", "f in all_csvs]) sorted_csvs = sorted(all_csvs, key=os.path.getmtime) sorted_csvs = [Path(c)", "[ domain.features_from_trajectory(x.trajectory) for x in query_options ] phi = {k:", "e.g. 
if creation_index = 0, use the dempref dempref_agent.csv most", "= ( np.sum(np.dot(feature_diff, w_samples.T)) / w_samples.shape[0] ) # 1 x", "w_samples: np.ndarray ) -> float: \"\"\" Approximate the maximum volume", "j in range( self.query_option_count ) ] ) ) for i", "in {end - start}s\") # Note the domain was reset", "the most recent \"\"\" if feedback == []: # No", "into the Sampler. :param phi: a dictionary mapping rankings (0,...,query_option_count-1)", ") # Define the prior as the unit ball centered", "None: \"\"\"Prepare for new query session.\"\"\" if self._sampler is not", "1, 0 for i in range(len(rank) - 1): value *=", "original paper's codebase designates as their main experiment. \"\"\" def", "query_options # are epsilon apart: query_diff = 0 print(\"Generating query_options\")", "for all queries :param args: the first argument is the", "mean_w / np.linalg.norm(mean_w) return np.array(mean_w, copy=True).reshape(1, -1) def read_param_csv(self, which_csv:", "NO DEMPREF). :param features: a list containing the feature values", "of w_samples to draw. 
:param T: if greater than 1,", "on the given queries' features \"\"\" if features.shape[0] > features.shape[1]:", "update_function while True: test_value = np.random.uniform( low=-1, high=1, size=self.dim_features )", "] query_options_trajectories = [ Trajectory(raw_trajectories[i], raw_phis[i]) for i in range(len(raw_trajectories))", "feature_diff = ( features[i] - features[1 - i] ) #", "int = 1, burn: int = 1000) -> np.ndarray: \"\"\"Return", "Select the indicated .csv and convert it to a dictionary:", "def load_prefs(self, phi: Dict, rank): \"\"\" Load the results of", ") ) query_diff = max(query_diffs) query = Query( query_type=Preference, task=None,", "-approx_objective(features_each_q_option, w_samples) else: return -rank_objective(features_each_q_option, w_samples) def objective(features: List, w_samples:", "False, ) -> List[Trajectory]: \"\"\" Generate self.num_queries number of queries.", "key != rank: tmp.append(phi[key] - phi[rank]) result.extend(tmp) self.phi_prefs.append(np.array(result)) def clear_pref(self):", "query session; domain.run(c) will thus reset to appropriate # state:", ") if self.include_previous_query and not blank_traj: features_each_q_option = np.append( features_each_q_option,", "= 0) -> dict: \"\"\"Read an agent-parameterization .csv. ::inputs: :creation_index:", "features[1 - i] ) # 1 x feature_size weighted_feature_diff =", ") + tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ), ) return", "tensor...to be added to the model logp\"(PyMC3 developer # guide).", "Trajectory(raw_trajectories[i], raw_phis[i]) for i in range(len(raw_trajectories)) ] if self.include_previous_query and", ") query_diffs = [] for m in range(len(query_options)): for n", "\"\"\"Initialize the agent. 
Note we needn't maintain a domain's start", "dictionary: chosen_csv = sorted_csvs[-which_csv] df = pd.read_csv(chosen_csv) params_dict = df.to_dict()", "-tt.nnet.relu( -self.beta_pref * tt.dot(self.phi_prefs[i], distribution) ) for i in range(len(self.phi_prefs))", "\"\"\" Instantiate the DemPref-specific sampler and query generator: \"\"\" self._sampler", "x feature_size # w_samples: n_samples x feature_size exp_rewards = (", "True: test_value = np.random.uniform( low=-1, high=1, size=self.dim_features ) test_value =", "data = [ [self.q_session_index, 0, \"mean\", mean_w], [self.q_session_index, 0, \"var\",", ">= 1 ), \"QueryGenerator.__init__: num_expectation_samples must be \\ at least", "= \"approx\", beta_demo: float = 0.1, beta_pref: float = 1.0,", "chain converges; these initial samples are discarded :return: list of", "# Potential is a \"potential term\" defined as an \"additional", "handled in inquire/tests/evaluation.py and the respective domain. \"\"\" self._weight_sample_count =", "bool = False, ): \"\"\"Initialize the agent. 
Note we needn't", "np.ndarray, verbose: bool = False, ) -> list: \"\"\"Generate query", "for f in all_files]) ] all_csvs = np.array([str(f[0]).strip() for f", "length of each query :num_expectation_samples: number of w_samples to use", "self.w_samples = self._sampler.sample(N=self.n_samples_summ) \"\"\"If we want to save data as", "= None self._first_q_session = True self._q_session_index = 0 self._query_index =", "agents ::domain: The task's environment ::feedback: A list of the", "read_param_csv(self, which_csv: int = 0) -> dict: \"\"\"Read an agent-parameterization", "update_function(distribution): result = tt.sum( [ -tt.nnet.relu( -self.beta_pref * tt.dot(self.phi_prefs[i], distribution)", "all_files]) ] all_csvs = np.array([str(f[0]).strip() for f in all_csvs]) sorted_csvs", "the objective function :return: the value of the objective function", "features: the feature values of each query option :param w_samples:", "\"pick_best\", and \"approx\". To use \"approx\", query_option_count must be 2;", "\"rank\" or self.update_func == \"pick_best\" ): raise Exception( update_func +", "for i in range(len(rank) - 1): value *= 1.0 /", ") # query_option_count x 1 -- summed across w_samples volumes_removed", "distribution) ) for i in range(len(self.phi_prefs)) ] ) + tt.sum(", "include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario, update_func=self.update_func, beta_pref=self.beta_pref, ) if self.incl_prev_query: if len(self.demos) >", "= np.mean(self.w_samples, axis=0) mean_w = mean_w / np.linalg.norm(mean_w) return np.array(mean_w,", "in descending order by time of creation: all_files = np.array(list(Path.iterdir(data_path)))", "= choice # Create dictionary map from rankings to query-option", "Note that some variable names are modified to be consist", "samples before the chain converges; these initial samples are discarded", "blank_traj: True is last_query_choice is blank. 
(Only True if not", "all_samples = all_samples.reshape( all_samples.shape[0] * all_samples.shape[1], -1 ) w_samples =", "tt.sum( tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i], distribution ) ) )", ") for j in range( self.query_option_count ) ] ) )", ":param features: a list containing the feature values of each", "self, current_weights: np.ndarray, domain: Environment, feedback: list ) -> np.ndarray:", "range( self.query_option_count ) ] ) ) for i in range(len(self.phi_prefs))", "= [ domain.features_from_trajectory(x.trajectory) for x in self.demos ] self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos", ":param controls: an array, concatenated to contain the control input", "= include_previous_query self.generate_scenario = ( generate_scenario # Currently must be", "of Hamilitonian Monte Carlo MCMC): https://arxiv.org/abs/1111.4246. \"\"\" # Define update", "update_func: str, beta_pref: float, ) -> None: \"\"\" Initialize the", "feature vectors. :param update_func: options are \"rank\", \"pick_best\", and \"approx\".", "self._query_generator.generate_query_options( self.w_samples ) query_diffs = [] for m in range(len(query_options)):", "this instance, the potential is effectively # the model's log-likelihood.", "\"value\"]) def initialize_weights(self, domain: Environment) -> np.ndarray: \"\"\"Randomly initialize weights", "in range(len(self.phi_prefs)) ] ) + tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution)", "domain's # optimization is w.r.t. the linear combination of weights", "controls_set = [ controls[i * z : (i + 1)", "result self.update_function = update_function while True: test_value = np.random.uniform( low=-1,", "= True self._q_session_index = 0 self._query_index = 0 self._w_dim =", "-> float: \"\"\"Minimize via L_BFGS. 
:param controls: an array, concatenated", "distribution, ) ) ) for j in range( self.query_option_count )", "Note: We instantiate the agent according to arguments corresponding to", "self._dempref_agent_parameters[ \"trajectory_length\" ][0] self.incl_prev_query = self._dempref_agent_parameters[ \"incl_prev_query\" ][0] self.gen_scenario =", "\"\"\" self.domain_name = self._dempref_agent_parameters[\"domain\"][0] self.teacher_type = self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos = self._dempref_agent_parameters[\"n_demos\"][0]", "CANNOT BE USED WITH (incl_prev_QUERY AND NO DEMPREF). :param features:", "visualize: bool = False, ): \"\"\"Initialize the agent. Note we", "# Use the most recent Choice in feedback: query_options =", "self.all_query_choices[self.random_scenario_index] = choice # Create dictionary map from rankings to", "of the DemPref codebase. z = self.trajectory_length * self.domain.control_size lower_input_bound", "self.beta_pref, self.beta_teacher, ] self.df = pd.DataFrame(columns=[\"run #\", \"pref_iter\", \"type\", \"value\"])", "# which encode the ranking from the preference # queries", "1\" self.domain = dom self.num_queries = num_queries self.trajectory_length = trajectory_length", "given queries' features \"\"\" # features: query_option_count x feature_size #", "\\ at least 1\" self.domain = dom self.num_queries = num_queries", "distribution) ) return result elif self.update_func == \"pick_best\": def update_function(distribution):", "np.argwhere([f.suffix == \".csv\" for f in all_files]) ] all_csvs =", ":trajectory_length: the length of each query :num_expectation_samples: number of w_samples", "elif self.update_func == \"approx\": result = phi[rank] - phi[1 -", "exp_rewards_sorted[rank[i]] = exp_rewards[i] value, i = 1, 0 for i", "( num_expectation_samples >= 1 ), \"QueryGenerator.__init__: num_expectation_samples must be \\", "that's handled in inquire/tests/evaluation.py and the respective domain. 
\"\"\" self._weight_sample_count", "r in all_samples]) return w_samples def get_trace(self, test_val: np.ndarray) ->", "extension of Hamilitonian Monte Carlo MCMC): https://arxiv.org/abs/1111.4246. \"\"\" # Define", "containing feature vectors for each demonstration; has dimension n_dem -by-", "we want to generate the scenario -- i.e., other agents'", "\" function must be one of the provided options\") if", "must be one of the provided options\") if self.incl_prev_query and", "= self._dempref_agent_parameters[ \"trajectory_length\" ][0] self.incl_prev_query = self._dempref_agent_parameters[ \"incl_prev_query\" ][0] self.gen_scenario", "beta_pref: float = 1.0, visualize: bool = False, ): \"\"\"", "class DemPrefQueryGenerator: \"\"\"Generate queries. Code adapted from original DemPref agent.", "init=\"adapt_diag\", ) # except: except ( pm.SamplingError, pm.parallel_sampling.ParallelSamplingError, ): return", "self.q_session_index += 1 data = [ [self.q_session_index, 0, \"mean\", mean_w],", "unit ball centered at 0: def sphere(w): \"\"\"Determine if w", "part of the unit ball.\"\"\" w_sum = pm.math.sqr(w).sum() result =", "features.shape[0] > features.shape[1]: features = features.T volumes_removed = [] for", "df.to_dict() return params_dict def process_demonstrations( self, trajectories: list, domain: Environment", "[self.q_session_index, 0, \"var\", var_w], ] self.df = self.df.append( pd.DataFrame( data,", "False ) assert ( self.generate_scenario is False ), \"Cannot generate", "phi: a dictionary mapping rankings (0,...,query_option_count-1) to feature vectors \"\"\"", "to draw. :param T: if greater than 1, all samples", "np.linalg.norm(r) for r in all_samples]) return w_samples def get_trace(self, test_val:", "Hamilitonian Monte Carlo MCMC): https://arxiv.org/abs/1111.4246. \"\"\" # Define update function:", "w.r.t. 
the linear combination of weights and # features; this", "the DemPref-specific sampler and query generator: \"\"\" self._sampler = None", "), \"Cannot use approximation to update function if query_option_count >", "np.ndarray) -> az.InferenceData: \"\"\"Create an MCMC trace.\"\"\" # model accumulates", "from DemPref's ApproxQueryGenerator. \"\"\" if self._query_generator is None: self._query_generator =", "function, evaluated on the given queries' features \"\"\" volumes_removed =", "][0] self.incl_prev_query = self._dempref_agent_parameters[ \"incl_prev_query\" ][0] self.gen_scenario = self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters", "( self.n_demos > 0 ), \"Cannot include previous query if", "in sorted(phi.keys()): if key != rank: tmp.append(phi[key] - phi[rank]) result.extend(tmp)", "self.dim_features = dim_features self.update_func = update_func self.beta_demo = beta_demo self.beta_pref", "tt class DemPref(Agent): \"\"\"A preference-querying agent seeded with demonstrations. Note:", "use the NUTS sampling algorithm (an extension of Hamilitonian Monte", "[phi[rank] - phi[rank]], [] for key in sorted(phi.keys()): if key", "print(f\"Finished computing queries in {end - start}s\") # Note the", "dim_features: int, update_func: str = \"approx\", beta_demo: float = 0.1,", "\"\"\" self.query_option_count = query_option_count self.dim_features = dim_features self.update_func = update_func", "within the proceeding # context: model = pm.Model() with model:", "= args[0] w_samples = args[1] controls = np.array(controls) controls_set =", "they did in DemPref:\"\"\" self.first_q_session = True self.q_session_index = 0", "def update_function(distribution): result = ( tt.sum( # sum across different", "agent. 
Note we needn't maintain a domain's start state; that's", "mean_w = np.mean(self.w_samples, axis=0) mean_w = mean_w / np.linalg.norm(mean_w) var_w", "session.\"\"\" if self._sampler is not None: self._sampler.clear_pref() self._sampler = self.DemPrefSampler(", "first argument is the domain, and the second is the", "features of query_options # are epsilon apart: query_diff = 0", "for the given set of controls \"\"\" domain = args[0]", "+ tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ) return result elif", "-> None: \"\"\"Generate demonstrations to seed the querying process.\"\"\" self.demos", "z : (i + 1) * z] for i in", "as az from inquire.agents.agent import Agent from inquire.environments.environment import Environment", "import pymc3.distributions.transforms as tr import scipy.optimize as opt import theano.tensor", "the indicated .csv and convert it to a dictionary: chosen_csv", "\"\"\"Determine if w is part of the unit ball.\"\"\" w_sum", "objective function :include_previous_query: boolean for whether one of the queries", "list(range(self.num_queries)) ) # iterating over all possible rankings for rank", "pm.parallel_sampling.ParallelSamplingError, ): return None return trace class DemPrefQueryGenerator: \"\"\"Generate queries.", "Maximize the volume removal objective. 
:param features: a list containing", "trajectories phi_demos = [ domain.features_from_trajectory(x.trajectory) for x in self.demos ]", "return w_samples def get_trace(self, test_val: np.ndarray) -> az.InferenceData: \"\"\"Create an", "self.query_option_count = self._dempref_agent_parameters[ \"query_option_count\" ][0] self.update_func = self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length =", "data: if self.first_q_session: self.first_q_session = False else: self.q_session_index += 1", "in range(len(raw_trajectories)) ] if self.include_previous_query and not blank_traj: return [last_query_choice]", "[d for d in self.cleaned_demos] class DemPrefSampler: \"\"\"Sample trajectories for", "(\"Update\" \" function must be one of the provided options\")", "defined as an \"additional # tensor...to be added to the", "draw=slice(burn, None) ).posterior.rv_x.values all_samples = all_samples.reshape( all_samples.shape[0] * all_samples.shape[1], -1", "used to approximate the objective function :return: the value of", "current_weights: np.ndarray, domain: Environment, feedback: list ) -> np.ndarray: \"\"\"Update", "which a trajectory begins. \"\"\" assert ( num_queries >= 1", "Sampler. :param demos: a Numpy array containing feature vectors for", "DemPref agent. \"\"\" def __init__( self, query_option_count: int, dim_features: int,", "# The following optimization is w.r.t. volume removal; the domain's", ":return: list of w_samples drawn \"\"\" \"\"\"Define model for MCMC.", "x in self.domain.control_bounds ] * self.trajectory_length upper_input_bound = [ x[1]", "and convert it to a dictionary: chosen_csv = sorted_csvs[-which_csv] df", "from Learning Reward Functions by Integrating Human Demonstrations and Preferences.", "using approximate gradients\" self.update_func = update_func self.beta_pref = beta_pref self.num_new_queries", "j:, : ] - self.phi_prefs[i][j], distribution, ) ) ) for", "uses demonstrations and preferences. 
Code adapted from Learning Reward Functions", "= ( self.num_queries - 1 if self.include_previous_query else self.num_queries )", "the objective function for the given set of controls \"\"\"", "DemPrefSampler: \"\"\"Sample trajectories for querying. Code adapted from original DemPref", "to approximate the objective :return: the value of the objective", "the model's learned weights. ::inputs: ::current_weights: Irrelevant for DemPref; useful", "Samples of w :param last_query_choice: The previously selected query. Only", "Code adapted from Learning Reward Functions by Integrating Human Demonstrations", ") for i, c in enumerate(controls_set): features_each_q_option[ :, i ]", "\"\"\" volumes_removed = [] for i in range(len(features)): feature_diff =", "demonstrations :param beta_pref: parameter measuring irrationality of teacher in selecting", "creation_index = 0, use the dempref dempref_agent.csv most recently created.", "all queries :param args: the first argument is the domain,", "self._trajectory_sample_count = trajectory_sample_count self._trajectory_length = trajectory_length self._interaction_types = interaction_types self._visualize", "queries in {end - start}s\") # Note the domain was", "len(rank) for i in range(len(rank)): exp_rewards_sorted[rank[i]] = exp_rewards[i] value, i", "properly index data: if self.first_q_session: self.first_q_session = False else: self.q_session_index", "trajectory_length: int, num_expectation_samples: int, include_previous_query: bool, generate_scenario: bool, update_func: str,", "np.array(controls) controls_set = [ controls[i * z : (i +", "\"\"\" Load the demonstrations into the Sampler. :param demos: a", "of the unit ball.\"\"\" w_sum = pm.math.sqr(w).sum() result = tt.switch(", "raw_phis[i]) for i in range(len(raw_trajectories)) ] if self.include_previous_query and not", "self.cleaned_demos] class DemPrefSampler: \"\"\"Sample trajectories for querying. 
Code adapted from", "in features] ) # query_option_count x feature_size weighted_feature_diff = (", "query_option_count x feature_size # w_samples: n_samples x feature_size exp_rewards =", "dict: \"\"\"Read an agent-parameterization .csv. ::inputs: :creation_index: A time-descending .csv", "\"rank\": def update_function(distribution): result = ( tt.sum( # sum across", "blank. (Only True if not using Dempref but using incl_prev_)", "weights and # features; this difference is a trait of", "continue\") az.plot_energy(trace) plt.show() input(\"Press enter to continue\") az.plot_posterior(trace) plt.show() input(\"Press", ") ) ) query_diff = max(query_diffs) query = Query( query_type=Preference,", "least 1\" assert ( trajectory_length >= 1 ), \"QueryGenerator.__init__: trajectory_length", "w, used to approximate the objective :return: the value of", "# load into sampler: features = [ domain.features_from_trajectory(x.trajectory) for x", "DemPref's ApproxQueryGenerator. \"\"\" if self._query_generator is None: self._query_generator = self.DemPrefQueryGenerator(", "self.trajectory_length opt_res = opt.fmin_l_bfgs_b( func, x0=np.random.uniform( low=self.num_new_queries * lower_input_bound, high=self.num_new_queries", "upper_input_bound = [ x[1] for x in self.domain.control_bounds ] *", "np.random.randint(len(self.demos)) else: self.random_scenario_index = 0 last_query_choice = self.all_query_choices[ self.random_scenario_index ]", "= features.T volumes_removed = [] for i in range(len(features)): feature_diff", "seed before beginning # this query session; domain.run(c) will thus", "if self.include_previous_query else self.num_queries ) def generate_query_options( self, w_samples: np.ndarray,", "volume removal objective. :param features: a list containing the feature", "original DemPref agent. 
\"\"\" def __init__( self, dom: Environment, num_queries:", "be one of the provided options\") if self.incl_prev_query and self.teacher_type", "( tt.sum( # sum across different queries [ tt.sum( #", "objective :return: the value of the objective function, evaluated on", "features.shape[1]: features = features.T volumes_removed = [] for i in", "= phi[rank[i]] elif self.update_func == \"approx\": result = phi[rank] -", "generate_query_options( self, w_samples: np.ndarray, last_query_choice: Trajectory = None, blank_traj: bool", "are epsilon apart: query_diff = 0 print(\"Generating query_options\") while query_diff", "= phi[rank] - phi[1 - rank] elif self.update_func == \"pick_best\":", "= self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos = self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count = self._dempref_agent_parameters[\"opt_iter_count\"][ 0 ]", "\"\"\" def __init__( self, weight_sample_count: int, trajectory_sample_count: int, trajectory_length: int,", "::current_weights: Irrelevant for DemPref; useful to other agents ::domain: The", "and the respective domain. 
\"\"\" self._weight_sample_count = weight_sample_count self._trajectory_sample_count =", "[ -tt.nnet.relu( -self.beta_pref * tt.dot(self.phi_prefs[i], distribution) ) for i in", "the domain, and the second is the samples that will", "axis=0) mean_w = mean_w / np.linalg.norm(mean_w) var_w = np.var(self.w_samples, axis=0)", "demonstrations to seed the querying process.\"\"\" self.demos = trajectories phi_demos", "if query_option_count > 2\" elif not ( self.update_func == \"rank\"", "# are epsilon apart: query_diff = 0 print(\"Generating query_options\") while", "self._weight_sample_count = weight_sample_count self._trajectory_sample_count = trajectory_sample_count self._trajectory_length = trajectory_length self._interaction_types", "to appropriate # state: raw_trajectories = [ self.domain.run(c) for c", "import numpy as np import pandas as pd import pymc3", "w_dim: int = 4, which_param_csv: int = 0, visualize: bool", "\"type\", \"value\"]) def initialize_weights(self, domain: Environment) -> np.ndarray: \"\"\"Randomly initialize", "= 4, which_param_csv: int = 0, visualize: bool = False,", "list, domain: Environment ) -> None: \"\"\"Generate demonstrations to seed", "[ domain.features_from_trajectory(x.trajectory) for x in self.demos ] self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos =", "c in sorted_csvs] # Select the indicated .csv and convert", ") # 1 x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T))", "maintain a domain's start state; that's handled in inquire/tests/evaluation.py and", "= dom self.num_queries = num_queries self.trajectory_length = trajectory_length self.num_expectation_samples =", "] raw_phis = [ self.domain.features_from_trajectory(t) for t in raw_trajectories ]", "axis=0) mean_w = mean_w / np.linalg.norm(mean_w) return np.array(mean_w, copy=True).reshape(1, -1)", "irrationality of teacher in selecting preferences \"\"\" self.query_option_count = query_option_count", "w_samples.T)) / 
w_samples.shape[0] ) # 1 x 1 -- summed", ":param query_option_count: Number of queries. :param dim_features: Dimension of feature", "sphere(rv_x)) trace = pm.sample( 10000, tune=5000, return_inferencedata=True, init=\"adapt_diag\", ) #", "blank_traj: bool = False, ) -> List[Trajectory]: \"\"\" Generate self.num_queries", "codebase's 'runner.py' object. Note that some variable names are modified", "use \"approx\", query_option_count must be 2; will throw an assertion", "pm.Potential(\"sphere\", sphere(rv_x)) trace = pm.sample( 10000, tune=5000, return_inferencedata=True, init=\"adapt_diag\", )", "update_func + \" is not a valid update function.\" )", "np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def rank_objective(features, w_samples)", "querying process.\"\"\" self.demos = trajectories phi_demos = [ domain.features_from_trajectory(x.trajectory) for", "return query def update_weights( self, current_weights: np.ndarray, domain: Environment, feedback:", "= True self.q_session_index = 0 self.query_index = 0 self.config =", "assertion error otherwise :param beta_demo: parameter measuring irrationality of teacher", "= self._query_generator.generate_query_options( self.w_samples, last_query_choice ) else: query_options = self._query_generator.generate_query_options( self.w_samples", "creation: all_files = np.array(list(Path.iterdir(data_path))) all_csvs = all_files[ np.argwhere([f.suffix == \".csv\"", "the value of the objective function for the given set", "= 1.0 - 1.0 / np.sum( np.exp(self.beta_pref * weighted_feature_diff) )", "mean_w / np.linalg.norm(mean_w) var_w = np.var(self.w_samples, axis=0) # Make sure", "difference vectors and # which encode the ranking from the", "# queries self.phi_prefs = [] def load_demo(self, phi_demos: np.ndarray): \"\"\"", "w_samples of w used to approximate the objective :return: the", "1 data = [ [self.q_session_index, 0, \"mean\", mean_w], [self.q_session_index, 0,", 
"Environment ) -> None: \"\"\"Generate demonstrations to seed the querying", "= pm.Model() with model: # Add random-variable x to model:", "from demonstrated trajectories self.phi_demos = np.zeros((1, self.dim_features)) # a list", "her query ::Inquire-specific inputs: :start_state: The state from which a", "required if self.incl_prev_query is True :param blank_traj: True is last_query_choice", "the respective domain. \"\"\" self._weight_sample_count = weight_sample_count self._trajectory_sample_count = trajectory_sample_count", "result = [] if self.update_func == \"rank\": result = [None]", "self.config = [ self.teacher_type, self.n_demos, self.trim_start, self.query_option_count, self.update_func, self.trajectory_length, self.incl_prev_query,", "params_dict = df.to_dict() return params_dict def process_demonstrations( self, trajectories: list,", "gradients. ::original inputs: :dom: the domain to generate queries on", "self._sampler.sample(N=self.n_samples_summ) \"\"\"If we want to save data as they did", "= self.df.append( pd.DataFrame( data, columns=[\"run #\", \"pref_iter\", \"type\", \"value\"] ),", "= self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count = self._dempref_agent_parameters[\"opt_iter_count\"][ 0 ] self.trim_start = self._dempref_agent_parameters[\"trim_start\"][0]", "for i in range(self.num_new_queries) ] features_each_q_option = np.zeros( (domain.w_dim, self.num_new_queries)", "list of the human feedback received to this point. DemPref", "the objective function :include_previous_query: boolean for whether one of the", "import itertools import os import time from pathlib import Path", ":num_expectation_samples: number of w_samples to use in approximating the objective", "import theano.tensor as tt class DemPref(Agent): \"\"\"A preference-querying agent seeded", "a trait of the DemPref codebase. 
z = self.trajectory_length *", "teacher in selecting preferences \"\"\" self.query_option_count = query_option_count self.dim_features =", "\"\"\"Sample trajectories for querying. Code adapted from original DemPref agent.", "n_dem -by- self.dim_features \"\"\" self.phi_demos = phi_demos def load_prefs(self, phi:", "Path from typing import Dict, List import arviz as az", "version 3.5; this codebase adapts their model to PyMC3 version", "inquire.environments.environment import Environment from inquire.interactions.feedback import Query, Trajectory from inquire.interactions.modalities", "-> np.ndarray: \"\"\"Return N samples from the distribution. The distribution", "+ tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ), ) return result", "if self.incl_prev_query: if last_query_choice.null: query_options = self._query_generator.generate_query_options( self.w_samples, blank_traj=True )", "= self._dempref_agent_parameters[ \"query_option_count\" ][0] self.update_func = self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length = self._dempref_agent_parameters[", "- phi[rank]) result.extend(tmp) self.phi_prefs.append(np.array(result)) def clear_pref(self): \"\"\"Clear all preference information", "use in approximating the objective function :include_previous_query: boolean for whether", "for i in range(len(rank)): exp_rewards_sorted[rank[i]] = exp_rewards[i] value, i =", "are \"rank\", \"pick_best\", and \"approx\". To use \"approx\", query_option_count must", "load_prefs(self, phi: Dict, rank): \"\"\" Load the results of a", "DemPrefQueryGenerator: \"\"\"Generate queries. Code adapted from original DemPref agent. 
\"\"\"", "Query, Trajectory from inquire.interactions.modalities import Preference import matplotlib.pyplot as plt", ":include_previous_query: boolean for whether one of the queries is the", "= 0, visualize: bool = False, ): \"\"\"Initialize the agent.", "pm.Model() with model: # Add random-variable x to model: rv_x", "= beta_pref self.num_new_queries = ( self.num_queries - 1 if self.include_previous_query", "the domain was reset w/ appropriate seed before beginning #", "- features[1 - i] ) # 1 x feature_size weighted_feature_diff", "to continue\") az.plot_energy(trace) plt.show() input(\"Press enter to continue\") az.plot_posterior(trace) plt.show()", "__init__( self, weight_sample_count: int, trajectory_sample_count: int, trajectory_length: int, interaction_types: list", "None) ).posterior.rv_x.values all_samples = all_samples.reshape( all_samples.shape[0] * all_samples.shape[1], -1 )", "[Path(c) for c in sorted_csvs] # Select the indicated .csv", "= [ opt_res[0][i * z : (i + 1) *", "human feedback received to this point. DemPref utilizes only the", ".csv file index. e.g. if creation_index = 0, use the", "from the distribution. The distribution is defined by applying update_func", "algorithm (an extension of Hamilitonian Monte Carlo MCMC): https://arxiv.org/abs/1111.4246. 
\"\"\"", "update_weights( self, current_weights: np.ndarray, domain: Environment, feedback: list ) ->", "x in query_options ] phi = {k: features[k] for k", "for i, c in enumerate(controls_set): features_each_q_option[ :, i ] =", "np.zeros((1, self.dim_features)) # a list of np.arrays containing feature difference", "/ np.sum( np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def", "assert ( self.update_func == \"pick_best\" or self.update_func == \"approx\" or", "We use the NUTS sampling algorithm (an extension of Hamilitonian", "False ), \"Cannot generate scenario when using approximate gradients\" self.update_func", "for x in query_options ] phi = {k: features[k] for", "self.gen_scenario, self.n_pref_iters, self.epsilon, self.n_samples_summ, self.n_samples_exp, self.beta_demo, self.beta_pref, self.beta_teacher, ] self.df", "L_BFGS. :param controls: an array, concatenated to contain the control", "0 self.query_index = 0 self.config = [ self.teacher_type, self.n_demos, self.trim_start,", "feedback yet received return self.w_samples else: # Use the most", "the new weights from the samples: mean_w = np.mean(self.w_samples, axis=0)", "number of queries to generate at each time step :trajectory_length:", "features.T volumes_removed = [] for i in range(len(features)): feature_diff =", "thus far. :param N: number of w_samples to draw. :param", "ball centered at 0: def sphere(w): \"\"\"Determine if w is", "feature_size exp_rewards = ( np.sum(np.dot(features, w_samples.T), axis=1) / w_samples.shape[0] )", "num_queries: int, trajectory_length: int, num_expectation_samples: int, include_previous_query: bool, generate_scenario: bool,", "queries using approx gradients. ::original inputs: :dom: the domain to", "ranking maximum volume removal objective function. Note: This objective uses", "from pathlib import Path from typing import Dict, List import", "domain. 
\"\"\" self._weight_sample_count = weight_sample_count self._trajectory_sample_count = trajectory_sample_count self._trajectory_length =", "\"\"\" Get the pre-defined agent parameters \"\"\" self._dempref_agent_parameters = self.read_param_csv(which_param_csv)", "<reponame>HARPLab/inquire \"\"\" An agent which uses demonstrations and preferences. Code", "burn: how many samples before the chain converges; these initial", "self.trajectory_length upper_input_bound = [ x[1] for x in self.domain.control_bounds ]", "= np.zeros((1, self.dim_features)) # a list of np.arrays containing feature", "is the previously selected query :generate_scenario: boolean for whether we", "feature-differences in a single term in PL-update tt.exp( self.beta_pref *", "in query_options ] phi = {k: features[k] for k in", "teacher selecting her query ::Inquire-specific inputs: :start_state: The state from", "tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ) return result elif self.update_func", "queries self.phi_prefs = [] def load_demo(self, phi_demos: np.ndarray): \"\"\" Load", "update_func=self.update_func, beta_demo=self.beta_demo, beta_pref=self.beta_pref, visualize=self._visualize, ) self.w_samples = self._sampler.sample(N=self.n_samples_summ) \"\"\"If we", "removal objective. :param features: a list containing the feature values", "agents' behavior :update_func: the update_func used; the options are \"pick_best\",", "file index. e.g. if creation_index = 0, use the dempref", "load into sampler: features = [ domain.features_from_trajectory(x.trajectory) for x in", "most recent \"\"\" if feedback == []: # No feedback", "gradients. Code adapted from DemPref's ApproxQueryGenerator. \"\"\" if self._query_generator is", "of teacher behavior. CANNOT BE USED WITH (incl_prev_QUERY AND NO", "of w_samples drawn \"\"\" \"\"\"Define model for MCMC. 
NOTE the", "+ \" is not a valid update function.\" ) #", "self.beta_teacher, ] self.df = pd.DataFrame(columns=[\"run #\", \"pref_iter\", \"type\", \"value\"]) def", "self._w_dim = w_dim assert ( self.update_func == \"pick_best\" or self.update_func", "- features[i] for f in features] ) # query_option_count x", "pm.math.gt(w_sum, 1.0), -100, # -np.inf, self.update_function(w), ) return result try:", "itertools.permutations( list(range(self.num_queries)) ) # iterating over all possible rankings for", "least 1\" assert ( num_expectation_samples >= 1 ), \"QueryGenerator.__init__: num_expectation_samples", "to feature vectors \"\"\" result = [] if self.update_func ==", "N: number of w_samples to draw. :param T: if greater", "paper's codebase designates as their main experiment. \"\"\" def __init__(", "[ tt.sum( # sum across different terms in PL-update -tt.log(", "while ensuring that features of query_options # are epsilon apart:", "information from the sampler.\"\"\" self.phi_prefs = [] def sample(self, N:", "self._sampler.sample(N=self.n_samples_summ) # Return the new weights from the samples: mean_w", "-> List[Trajectory]: \"\"\" Generate self.num_queries number of queries. 
This function", "of the objective function for the given set of controls", "= visualize if self.update_func == \"approx\": assert ( self.query_option_count ==", "tmp.append(phi[key] - phi[rank]) result.extend(tmp) self.phi_prefs.append(np.array(result)) def clear_pref(self): \"\"\"Clear all preference", ") test_value = test_value / np.linalg.norm(test_value) norm = (test_value **", "# except: except ( pm.SamplingError, pm.parallel_sampling.ParallelSamplingError, ): return None return", "if w is part of the unit ball.\"\"\" w_sum =", ") return result try: # Potential is a \"potential term\"", "of w used to approximate the objective :return: the value", "the unit ball centered at 0: def sphere(w): \"\"\"Determine if", "num_expectation_samples: int, include_previous_query: bool, generate_scenario: bool, update_func: str, beta_pref: float,", "self.trajectory_length = trajectory_length self.num_expectation_samples = num_expectation_samples self.include_previous_query = include_previous_query self.generate_scenario", "the .csvs in descending order by time of creation: all_files", "want to save data as they did in DemPref:\"\"\" mean_w", "1 ), \"QueryGenerator.__init__: num_expectation_samples must be \\ at least 1\"", "least 1\" self.domain = dom self.num_queries = num_queries self.trajectory_length =", "of each query :param w_samples: samples of w, used to", "-100, # -np.inf, self.update_function(w), ) return result try: # Potential", "other agents ::domain: The task's environment ::feedback: A list of", "= args[1] controls = np.array(controls) controls_set = [ controls[i *", "elif self.update_func == \"approx\": return -approx_objective(features_each_q_option, w_samples) else: return -rank_objective(features_each_q_option,", "self.domain.run(c) for c in query_options_controls ] raw_phis = [ self.domain.features_from_trajectory(t)", "Use the most recent Choice in feedback: query_options = feedback[-1].choice.options", "In this instance, the potential is effectively # 
the model's", "beta_pref self._visualize = visualize if self.update_func == \"approx\": assert (", "\"\"\" domain = args[0] w_samples = args[1] controls = np.array(controls)", "no demonstration is provided\" self.n_samples_summ = self._dempref_agent_parameters[\"n_samples_summ\"][ 0 ] self.n_samples_exp", "0) -> dict: \"\"\"Read an agent-parameterization .csv. ::inputs: :creation_index: A", "and \"approx\". To use \"approx\", query_option_count must be 2; will", "of feature vectors. :param update_func: options are \"rank\", \"pick_best\", and", "= None self._w_samples = None self._query_generator = None self._first_q_session =", "import Agent from inquire.environments.environment import Environment from inquire.interactions.feedback import Query,", "\"\"\"A preference-querying agent seeded with demonstrations. Note: We instantiate the", "from original DemPref agent. \"\"\" def __init__( self, query_option_count: int,", "for the teacher selecting her query ::Inquire-specific inputs: :start_state: The", "-> dict: \"\"\"Read an agent-parameterization .csv. ::inputs: :creation_index: A time-descending", "from which a trajectory begins. \"\"\" assert ( num_queries >=", "self.incl_prev_query: self.all_query_choices[self.random_scenario_index] = choice # Create dictionary map from rankings", "= exp_rewards[i] value, i = 1, 0 for i in", "names are modified to be consist with the Inquire parlance.", "an assertion error otherwise :param beta_demo: parameter measuring irrationality of", "-tt.log( tt.sum( tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i], distribution ) )", "axis=0) # Make sure to properly index data: if self.first_q_session:", "into sampler: features = [ domain.features_from_trajectory(x.trajectory) for x in query_options", "samples from the distribution. 
The distribution is defined by applying", "visualize if self.update_func == \"approx\": assert ( self.query_option_count == 2", "= self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon = self._dempref_agent_parameters[\"epsilon\"][0] \"\"\" Instantiate the DemPref-specific sampler", "# Get a sampling trace (and avoid Bad Initial Energy):", "vectors and # which encode the ranking from the preference", "new weights from the samples: mean_w = np.mean(self.w_samples, axis=0) mean_w", "Query( query_type=Preference, task=None, start_state=query_state, trajectories=query_options, ) return query def update_weights(", "objective function. Note: This objective uses the Plackett-Luce model of", "MCMC trace.\"\"\" # model accumulates the objects defined within the", "all_samples.shape[1], -1 ) w_samples = np.array([r / np.linalg.norm(r) for r", "distribution ) ) ) ) for i in range(len(self.phi_prefs)) ]", "= trajectory_length self._interaction_types = interaction_types self._visualize = visualize \"\"\" Get", "greater than 1, all samples except each T^{th} sample are", "from typing import Dict, List import arviz as az from", "tt.dot(self.phi_demos, distribution) ) return result elif self.update_func == \"rank\": def", "-- i.e., other agents' behavior :update_func: the update_func used; the", "self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count = self._dempref_agent_parameters[\"opt_iter_count\"][ 0 ] self.trim_start = self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count", "# Create dictionary map from rankings to query-option features; #", "- 1): value *= 1.0 / np.sum( np.exp( self.beta_pref *", "rank_objective(features, w_samples) -> float: \"\"\" The ranking maximum volume removal", "self.epsilon, self.n_samples_summ, self.n_samples_exp, self.beta_demo, self.beta_pref, self.beta_teacher, ] self.df = pd.DataFrame(columns=[\"run", "the given queries' features \"\"\" # features: query_option_count x feature_size", "in self.demos 
] self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos = self.demos if self.incl_prev_query: self.all_query_choices", "query-option features; # load into sampler: features = [ domain.features_from_trajectory(x.trajectory)", "used to approximate the objective :return: the value of the", "queries. :param dim_features: Dimension of feature vectors. :param update_func: options", "all samples except each T^{th} sample are discarded :param burn:", "last_query_choice: The previously selected query. Only required if self.incl_prev_query is", "= self._query_generator.generate_query_options( self.w_samples, blank_traj=True ) else: query_options = self._query_generator.generate_query_options( self.w_samples,", "def update_weights( self, current_weights: np.ndarray, domain: Environment, feedback: list )", "self.n_pref_iters = self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon = self._dempref_agent_parameters[\"epsilon\"][0] \"\"\" Instantiate the DemPref-specific", "self.num_new_queries) ) for i, c in enumerate(controls_set): features_each_q_option[ :, i", "return np.min(volumes_removed) def approx_objective( features: np.ndarray, w_samples: np.ndarray ) ->", "main experiment. \"\"\" def __init__( self, weight_sample_count: int, trajectory_sample_count: int,", "assert ( self.generate_scenario is False ), \"Cannot generate scenario when", "yet received return self.w_samples else: # Use the most recent", "volumes_removed.append(1 - value) return np.min(volumes_removed) # The following optimization is", "for c in sorted_csvs] # Select the indicated .csv and", "centered at 0: def sphere(w): \"\"\"Determine if w is part", "class generates queries using approx gradients. 
::original inputs: :dom: the", "domain.features_from_trajectory( query_options[n].trajectory ) ) ) query_diff = max(query_diffs) query =", "np.ndarray, domain: Environment, feedback: list ) -> np.ndarray: \"\"\"Update the", "which encode the ranking from the preference # queries self.phi_prefs", "** 2).sum() if norm <= 1: break # Get a", "process_demonstrations( self, trajectories: list, domain: Environment ) -> None: \"\"\"Generate", "= ( features[i] - features[1 - i] ) # 1", "break if self._visualize: az.plot_trace(trace) plt.show() input(\"Press enter to continue\") az.plot_energy(trace)", "array, concatenated to contain the control input for all queries", "preference-querying agent seeded with demonstrations. Note: We instantiate the agent", "] self.trim_start = self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count = self._dempref_agent_parameters[ \"query_option_count\" ][0] self.update_func", "\"\"\" assert ( num_queries >= 1 ), \"QueryGenerator.__init__: num_queries must", "Get a sampling trace (and avoid Bad Initial Energy): while", ").posterior.rv_x.values all_samples = all_samples.reshape( all_samples.shape[0] * all_samples.shape[1], -1 ) w_samples", "True: trace = self.get_trace(test_value) if trace is not None: break", ") return result elif self.update_func == \"pick_best\": def update_function(distribution): result", "value of the objective function for the given set of", "* self.num_new_queries * self.trajectory_length, approx_grad=True, ) query_options_controls = [ opt_res[0][i", "\"incl_prev_query\" ][0] self.gen_scenario = self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters = self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon =", "across different terms in PL-update -tt.log( [ tt.sum( # sum", "self.phi_prefs[i][j], distribution, ) ) ) for j in range( self.query_option_count", "distribution is defined by applying update_func on the demonstrations and", "tt.sum( [ -tt.nnet.relu( -self.beta_pref * 
tt.dot(self.phi_prefs[i], distribution) ) for i", "1.0, visualize: bool = False, ): \"\"\" Initialize the sampler.", "interaction_types: list = [], w_dim: int = 4, which_param_csv: int", ":param features: the feature values of each query option :param", "parlance. \"\"\" self.domain_name = self._dempref_agent_parameters[\"domain\"][0] self.teacher_type = self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos =", "2 ), \"Cannot use approximation to update function if query_option_count", "= 1.0 - np.minimum( 1.0, np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed)", "this class generates queries using approx gradients. ::original inputs: :dom:", "queries [ tt.sum( # sum across different terms in PL-update", "process.\"\"\" self.demos = trajectories phi_demos = [ domain.features_from_trajectory(x.trajectory) for x", "# Add random-variable x to model: rv_x = pm.Uniform( name=\"rv_x\",", "Note the domain was reset w/ appropriate seed before beginning", "= 0, use the dempref dempref_agent.csv most recently created. \"\"\"", "guide). 
In this instance, the potential is effectively # the", "str, beta_pref: float, ) -> None: \"\"\" Initialize the approx", "each query option :param w_samples: w_samples of w used to", "query generator: \"\"\" self._sampler = None self._w_samples = None self._query_generator", "try: # Potential is a \"potential term\" defined as an", "query :param w_samples: samples of w, used to approximate the", "= [ self.domain.features_from_trajectory(t) for t in raw_trajectories ] query_options_trajectories =", "query = Query( query_type=Preference, task=None, start_state=query_state, trajectories=query_options, ) return query", "distribution) ), ) return result self.update_function = update_function while True:", "= feedback[-1].choice.options choice = feedback[-1].choice.selection choice_index = query_options.index(choice) if self.incl_prev_query:", "dictionary map from rankings to query-option features; # load into", "sorted(all_csvs, key=os.path.getmtime) sorted_csvs = [Path(c) for c in sorted_csvs] #", "is None: self._query_generator = self.DemPrefQueryGenerator( dom=domain, num_queries=self.query_option_count, trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query,", "= [] for i in range(len(features)): feature_diff = ( features[i]", "= self.trajectory_length * self.domain.control_size lower_input_bound = [ x[0] for x", "samples are discarded :return: list of w_samples drawn \"\"\" \"\"\"Define", "model: # Add random-variable x to model: rv_x = pm.Uniform(", "= [ self.teacher_type, self.n_demos, self.trim_start, self.query_option_count, self.update_func, self.trajectory_length, self.incl_prev_query, self.gen_scenario,", "np.linalg.norm(mean_w) return np.array(mean_w, copy=True).reshape(1, -1) def read_param_csv(self, which_csv: int =", "* tt.dot(self.phi_prefs[i], distribution) ) for i in range(len(self.phi_prefs)) ] )", "agent. 
\"\"\" def __init__( self, dom: Environment, num_queries: int, trajectory_length:", "range(len(query_options))} self._sampler.load_prefs(phi, choice_index) self.w_samples = self._sampler.sample(N=self.n_samples_summ) # Return the new", "test_val: np.ndarray) -> az.InferenceData: \"\"\"Create an MCMC trace.\"\"\" # model", "self.update_func == \"rank\" or self.update_func == \"pick_best\" ): raise Exception(", "each query :param w_samples: samples of w, used to approximate", "array containing feature vectors for each demonstration; has dimension n_dem", "the dempref dempref_agent.csv most recently created. \"\"\" data_path = Path.cwd()", "of teacher in selecting preferences \"\"\" self.query_option_count = query_option_count self.dim_features", "vectors. :param update_func: options are \"rank\", \"pick_best\", and \"approx\". To", ":param demos: a Numpy array containing feature vectors for each", "0, use the dempref dempref_agent.csv most recently created. \"\"\" data_path", "- domain.features_from_trajectory( query_options[n].trajectory ) ) ) query_diff = max(query_diffs) query", "Path(\"../inquire/agents/\") # Sort the .csvs in descending order by time", "1 ), \"QueryGenerator.__init__: num_queries must be at least 1\" assert", ") ) ) ) for i in range(len(self.phi_prefs)) ] )", "pd import pymc3 as pm import pymc3.distributions.transforms as tr import", "Instance attributes from orginal codebase's 'runner.py' object. 
Note that some", "update_function(distribution): result = ( tt.sum( # sum across different queries", "to arguments corresponding to what the the original paper's codebase", "term\" defined as an \"additional # tensor...to be added to", "Instantiate the DemPref-specific sampler and query generator: \"\"\" self._sampler =", "function for the given set of controls \"\"\" domain =", "self._dempref_agent_parameters[ \"query_option_count\" ][0] self.update_func = self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length = self._dempref_agent_parameters[ \"trajectory_length\"", "Environment) -> np.ndarray: \"\"\"Randomly initialize weights for gradient descent.\"\"\" self.reset()", "# Sort the .csvs in descending order by time of", "range(len(rank)): result[i] = phi[rank[i]] elif self.update_func == \"approx\": result =", "features_each_q_option, domain.features_from_trajectory(last_query_choice), axis=1, ) if self.update_func == \"pick_best\": return -objective(features_each_q_option,", "self.DemPrefSampler( query_option_count=self.query_option_count, dim_features=self._w_dim, update_func=self.update_func, beta_demo=self.beta_demo, beta_pref=self.beta_pref, visualize=self._visualize, ) self.w_samples =", "__init__( self, query_option_count: int, dim_features: int, update_func: str = \"approx\",", "behavior. CANNOT BE USED WITH (incl_prev_QUERY AND NO DEMPREF). 
:param", "features[k] for k in range(len(query_options))} self._sampler.load_prefs(phi, choice_index) self.w_samples = self._sampler.sample(N=self.n_samples_summ)", "itertools import os import time from pathlib import Path from", "most recent Choice in feedback: query_options = feedback[-1].choice.options choice =", "Note: This objective uses the Plackett-Luce model of teacher behavior.", "] self.n_samples_exp = self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo = self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref = self._dempref_agent_parameters[\"beta_pref\"][0]", "[self.q_session_index, 0, \"mean\", mean_w], [self.q_session_index, 0, \"var\", var_w], ] self.df", "self.beta_pref = beta_pref self.num_new_queries = ( self.num_queries - 1 if", "is blank. (Only True if not using Dempref but using", "and not blank_traj: features_each_q_option = np.append( features_each_q_option, domain.features_from_trajectory(last_query_choice), axis=1, )", "phi[1 - rank] elif self.update_func == \"pick_best\": result, tmp =", "in range(len(features)): feature_diff = ( features[i] - features[1 - i]", "controls_as_input=True ) if self.include_previous_query and not blank_traj: features_each_q_option = np.append(", "the first argument is the domain, and the second is", "result = phi[rank] - phi[1 - rank] elif self.update_func ==", "np.array(exp_rewards_sorted[i:]) - exp_rewards_sorted[i] ) ) ) volumes_removed.append(1 - value) return", "the Sampler. 
:param phi: a dictionary mapping rankings (0,...,query_option_count-1) to", "= self.DemPrefQueryGenerator( dom=domain, num_queries=self.query_option_count, trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario, update_func=self.update_func, beta_pref=self.beta_pref,", "Initial Energy): while True: trace = self.get_trace(test_value) if trace is", "A list of the human feedback received to this point.", "The following optimization is w.r.t. volume removal; the domain's #", "features; this difference is a trait of the DemPref codebase.", "and Preferences. \"\"\" import itertools import os import time from", "all_csvs = np.array([str(f[0]).strip() for f in all_csvs]) sorted_csvs = sorted(all_csvs,", "function :include_previous_query: boolean for whether one of the queries is", "self.generate_scenario is False ), \"Cannot generate scenario when using approximate", "= [] for i in range(len(features)): feature_diff = np.array( [f", "approx gradients. 
::original inputs: :dom: the domain to generate queries", "def update_function(distribution): result = tt.sum( [ -tt.nnet.relu( -self.beta_pref * tt.dot(self.phi_prefs[i],", "plt import numpy as np import pandas as pd import", "raw_trajectories = [ self.domain.run(c) for c in query_options_controls ] raw_phis", "assert ( self.n_demos > 0 ), \"Cannot include previous query", "across different queries [ tt.sum( # sum across different terms", "sampler: features = [ domain.features_from_trajectory(x.trajectory) for x in query_options ]", "= False, ) -> list: \"\"\"Generate query using approximate gradients.", "* all_samples.shape[1], -1 ) w_samples = np.array([r / np.linalg.norm(r) for", "self.update_func = update_func self.beta_pref = beta_pref self.num_new_queries = ( self.num_queries", "elif not ( self.update_func == \"rank\" or self.update_func == \"pick_best\"", "enter to continue\") az.plot_posterior(trace) plt.show() input(\"Press enter to continue\") all_samples", "choice_index) self.w_samples = self._sampler.sample(N=self.n_samples_summ) # Return the new weights from", "query_option_count > 2\" elif not ( self.update_func == \"rank\" or", "each time step :trajectory_length: the length of each query :num_expectation_samples:", "\"mean\", mean_w], [self.q_session_index, 0, \"var\", var_w], ] self.df = self.df.append(", "key=os.path.getmtime) sorted_csvs = [Path(c) for c in sorted_csvs] # Select", "self.epsilon: if self.incl_prev_query: if last_query_choice.null: query_options = self._query_generator.generate_query_options( self.w_samples, blank_traj=True", "] = domain.features_from_trajectory( c, controls_as_input=True ) if self.include_previous_query and not", "/ np.linalg.norm(mean_w) var_w = np.var(self.w_samples, axis=0) # Make sure to", "vectors for each demonstration; has dimension n_dem -by- self.dim_features \"\"\"", "time.perf_counter() def func(controls: np.ndarray, *args) -> float: \"\"\"Minimize via L_BFGS.", "[ 
self.domain.features_from_trajectory(t) for t in raw_trajectories ] query_options_trajectories = [", "w_samples: w_samples of w used to approximate the objective :return:", "self.read_param_csv(which_param_csv) \"\"\" Instance attributes from orginal codebase's 'runner.py' object. Note", "arguments corresponding to what the the original paper's codebase designates", "# the model's log-likelihood. p = pm.Potential(\"sphere\", sphere(rv_x)) trace =", "testval=test_val, ) # Define the prior as the unit ball", "(test_value ** 2).sum() if norm <= 1: break # Get", "v_removed = 1.0 - np.minimum( 1.0, np.exp(self.beta_pref * weighted_feature_diff) )", "all_samples]) return w_samples def get_trace(self, test_val: np.ndarray) -> az.InferenceData: \"\"\"Create", "= 0 self.config = [ self.teacher_type, self.n_demos, self.trim_start, self.query_option_count, self.update_func,", "model of teacher behavior. CANNOT BE USED WITH (incl_prev_QUERY AND", "dictionary mapping rankings (0,...,query_option_count-1) to feature vectors \"\"\" result =", "\"\"\" if self._query_generator is None: self._query_generator = self.DemPrefQueryGenerator( dom=domain, num_queries=self.query_option_count,", "measuring irrationality of teacher in selecting preferences \"\"\" self.query_option_count =", "= np.array([r / np.linalg.norm(r) for r in all_samples]) return w_samples", "self.num_queries ) def generate_query_options( self, w_samples: np.ndarray, last_query_choice: Trajectory =", "Dict, List import arviz as az from inquire.agents.agent import Agent", ") else: query_options = self._query_generator.generate_query_options( self.w_samples, last_query_choice ) else: query_options", "in range(len(rank)): exp_rewards_sorted[rank[i]] = exp_rewards[i] value, i = 1, 0", "\"\"\"Generate queries. Code adapted from original DemPref agent. 
\"\"\" def", "return_inferencedata=True, init=\"adapt_diag\", ) # except: except ( pm.SamplingError, pm.parallel_sampling.ParallelSamplingError, ):", "used; the options are \"pick_best\", \"approx\", and \"rank\" :beta_pref: the", "list: \"\"\"Generate query using approximate gradients. Code adapted from DemPref's", "tt.sum( [ -tt.log( tt.sum( tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i], distribution", "= [] def load_demo(self, phi_demos: np.ndarray): \"\"\" Load the demonstrations", "import Environment from inquire.interactions.feedback import Query, Trajectory from inquire.interactions.modalities import", "not ( self.update_func == \"rank\" or self.update_func == \"pick_best\" ):", "\"\"\"Return N samples from the distribution. The distribution is defined", "previously selected query. Only required if self.incl_prev_query is True :param", "in approximating the objective function :include_previous_query: boolean for whether one", "self.trim_start = self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count = self._dempref_agent_parameters[ \"query_option_count\" ][0] self.update_func =", "for new query session.\"\"\" if self._sampler is not None: self._sampler.clear_pref()", "query_options.index(choice) if self.incl_prev_query: self.all_query_choices[self.random_scenario_index] = choice # Create dictionary map", "= feedback[-1].choice.selection choice_index = query_options.index(choice) if self.incl_prev_query: self.all_query_choices[self.random_scenario_index] = choice", "= tt.sum( [ -tt.log( tt.sum( tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i],", "Functions by Integrating Human Demonstrations and Preferences. \"\"\" import itertools", "] all_csvs = np.array([str(f[0]).strip() for f in all_csvs]) sorted_csvs =", "is the domain, and the second is the samples that", "elif self.update_func == \"pick_best\": def update_function(distribution): result = tt.sum( [", "of queries. :param dim_features: Dimension of feature vectors. 
:param update_func:", "domain.features_from_trajectory(last_query_choice), axis=1, ) if self.update_func == \"pick_best\": return -objective(features_each_q_option, w_samples)", "None: \"\"\"Generate demonstrations to seed the querying process.\"\"\" self.demos =", "a sampling trace (and avoid Bad Initial Energy): while True:", "the chain converges; these initial samples are discarded :return: list", "* self.trajectory_length, approx_grad=True, ) query_options_controls = [ opt_res[0][i * z", "data, columns=[\"run #\", \"pref_iter\", \"type\", \"value\"] ), ignore_index=True, ) def", "PyMC3 version 3.5; this codebase adapts their model to PyMC3", "0 self.config = [ self.teacher_type, self.n_demos, self.trim_start, self.query_option_count, self.update_func, self.trajectory_length,", "self.n_pref_iters, self.epsilon, self.n_samples_summ, self.n_samples_exp, self.beta_demo, self.beta_pref, self.beta_teacher, ] self.df =", "in range(len(rank)): result[i] = phi[rank[i]] elif self.update_func == \"approx\": result", "the given queries' features \"\"\" volumes_removed = [] for i", "* z] for i in range(self.num_new_queries) ] end = time.perf_counter()", "the maximum volume removal objective. 
:param features: the feature values", "return -approx_objective(features_each_q_option, w_samples) else: return -rank_objective(features_each_q_option, w_samples) def objective(features: List,", "No feedback yet received return self.w_samples else: # Use the", "typing import Dict, List import arviz as az from inquire.agents.agent", "features: a list containing the feature values of each query", "features \"\"\" # features: query_option_count x feature_size # w_samples: n_samples", ") query_options_controls = [ opt_res[0][i * z : (i +", "x 1 -- summed across w_samples volumes_removed = [] rankings", "\"\"\" self._weight_sample_count = weight_sample_count self._trajectory_sample_count = trajectory_sample_count self._trajectory_length = trajectory_length", "inquire.interactions.modalities import Preference import matplotlib.pyplot as plt import numpy as", "== 2 ), \"Cannot use approximation to update function if", "Irrelevant for DemPref; useful to other agents ::domain: The task's", "of w :param last_query_choice: The previously selected query. Only required", "a Numpy array containing feature vectors for each demonstration; has", "query_option_count x 1 -- summed across w_samples volumes_removed = []", "preference # queries self.phi_prefs = [] def load_demo(self, phi_demos: np.ndarray):", "range(len(raw_trajectories)) ] if self.include_previous_query and not blank_traj: return [last_query_choice] +", "drawn \"\"\" \"\"\"Define model for MCMC. 
NOTE the DemPref codebase", "1 ), \"QueryGenerator.__init__: trajectory_length must be at least 1\" assert", "for rank in rankings: exp_rewards_sorted = [None] * len(rank) for", "in feedback: query_options = feedback[-1].choice.options choice = feedback[-1].choice.selection choice_index =", "self.beta_demo, self.beta_pref, self.beta_teacher, ] self.df = pd.DataFrame(columns=[\"run #\", \"pref_iter\", \"type\",", "\"\"\"If we want to save data as they did in", "int, include_previous_query: bool, generate_scenario: bool, update_func: str, beta_pref: float, )", "[] for i in range(len(features)): feature_diff = ( features[i] -", "tt.sum( # sum down different feature-differences in a single term", ") def generate_query_options( self, w_samples: np.ndarray, last_query_choice: Trajectory = None,", "def initialize_weights(self, domain: Environment) -> np.ndarray: \"\"\"Randomly initialize weights for", "previous query if no demonstration is provided\" self.n_samples_summ = self._dempref_agent_parameters[\"n_samples_summ\"][", "query_option_count=self.query_option_count, dim_features=self._w_dim, update_func=self.update_func, beta_demo=self.beta_demo, beta_pref=self.beta_pref, visualize=self._visualize, ) self.w_samples = self._sampler.sample(N=self.n_samples_summ)", "and \"rank\" :beta_pref: the rationality parameter for the teacher selecting", "update_func on the demonstrations and preferences observed thus far. :param", "test_value / np.linalg.norm(test_value) norm = (test_value ** 2).sum() if norm", "agent. \"\"\" def __init__( self, query_option_count: int, dim_features: int, update_func:", "gradients\" self.update_func = update_func self.beta_pref = beta_pref self.num_new_queries = (", "\"approx\". To use \"approx\", query_option_count must be 2; will throw", "instance, the potential is effectively # the model's log-likelihood. p", "the human feedback received to this point. 
DemPref utilizes only", "in rankings: exp_rewards_sorted = [None] * len(rank) for i in", "\"\"\" self.phi_demos = phi_demos def load_prefs(self, phi: Dict, rank): \"\"\"", "inquire.agents.agent import Agent from inquire.environments.environment import Environment from inquire.interactions.feedback import", "otherwise :param beta_demo: parameter measuring irrationality of teacher in providing", "import time from pathlib import Path from typing import Dict,", "or self.update_func == \"pick_best\" ): raise Exception( update_func + \"", "adapted from original DemPref agent. \"\"\" def __init__( self, dom:", "teacher behavior. CANNOT BE USED WITH (incl_prev_QUERY AND NO DEMPREF).", "the ranking from the preference # queries self.phi_prefs = []", "x[0] for x in self.domain.control_bounds ] * self.trajectory_length upper_input_bound =", "upper_input_bound, size=(self.num_new_queries * z), ), args=(self.domain, w_samples), bounds=self.domain.control_bounds * self.num_new_queries", "a dictionary: chosen_csv = sorted_csvs[-which_csv] df = pd.read_csv(chosen_csv) params_dict =", "1 -- summed across w_samples v_removed = 1.0 - np.minimum(", "self.n_demos > 0 ), \"Cannot include previous query if no", "as they did in DemPref:\"\"\" self.first_q_session = True self.q_session_index =", "= mean_w / np.linalg.norm(mean_w) var_w = np.var(self.w_samples, axis=0) # Make", "DemPref utilizes only the most recent \"\"\" if feedback ==", "::Inquire-specific inputs: :start_state: The state from which a trajectory begins.", "query_options = feedback[-1].choice.options choice = feedback[-1].choice.selection choice_index = query_options.index(choice) if", "1.0 / np.sum( np.exp( self.beta_pref * ( np.array(exp_rewards_sorted[i:]) - exp_rewards_sorted[i]", "number of queries. 
This function produces query options that (locally)", "self.incl_prev_query and self.teacher_type == \"term\": assert ( self.n_demos > 0", "in sorted_csvs] # Select the indicated .csv and convert it", "trace is not None: break if self._visualize: az.plot_trace(trace) plt.show() input(\"Press", "while True: trace = self.get_trace(test_value) if trace is not None:", "0: self.random_scenario_index = np.random.randint(len(self.demos)) else: self.random_scenario_index = 0 last_query_choice =", "sorted_csvs = sorted(all_csvs, key=os.path.getmtime) sorted_csvs = [Path(c) for c in", "True self.q_session_index = 0 self.query_index = 0 self.config = [", "Create dictionary map from rankings to query-option features; # load", "2\" elif not ( self.update_func == \"rank\" or self.update_func ==", "number of w_samples to draw. :param T: if greater than", "= [ domain.features_from_trajectory(x.trajectory) for x in query_options ] phi =", "1 if self.include_previous_query else self.num_queries ) def generate_query_options( self, w_samples:", "feature values of each query option :param w_samples: w_samples of", "experiment. \"\"\" def __init__( self, weight_sample_count: int, trajectory_sample_count: int, trajectory_length:", "samples of w, used to approximate the objective :return: the", "with model: # Add random-variable x to model: rv_x =", "self.beta_pref * ( np.array(exp_rewards_sorted[i:]) - exp_rewards_sorted[i] ) ) ) volumes_removed.append(1", "np.array(list(Path.iterdir(data_path))) all_csvs = all_files[ np.argwhere([f.suffix == \".csv\" for f in", "N: int, T: int = 1, burn: int = 1000)", "), \"QueryGenerator.__init__: num_queries must be at least 1\" assert (", "begins. 
\"\"\" assert ( num_queries >= 1 ), \"QueryGenerator.__init__: num_queries", "self.update_func = self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length = self._dempref_agent_parameters[ \"trajectory_length\" ][0] self.incl_prev_query =", "self._first_q_session = True self._q_session_index = 0 self._query_index = 0 self._w_dim", "= np.random.uniform( low=-1, high=1, size=self.dim_features ) test_value = test_value /", "query_diffs.append( np.linalg.norm( domain.features_from_trajectory( query_options[m].trajectory ) - domain.features_from_trajectory( query_options[n].trajectory ) )", "int, T: int = 1, burn: int = 1000) ->", "self.update_func == \"pick_best\" ): raise Exception( update_func + \" is", "if norm <= 1: break # Get a sampling trace", "= [ Trajectory(raw_trajectories[i], raw_phis[i]) for i in range(len(raw_trajectories)) ] if", "= self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher = self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If we want to save", "of the queries is the previously selected query :generate_scenario: boolean", "self.update_func == \"approx\": def update_function(distribution): result = tt.sum( [ -tt.nnet.relu(", "logp\"(PyMC3 developer # guide). In this instance, the potential is", "Define update function: if self.update_func == \"approx\": def update_function(distribution): result", "throw an assertion error otherwise :param beta_demo: parameter measuring irrationality", "z] for i in range(self.num_new_queries) ] features_each_q_option = np.zeros( (domain.w_dim,", "self._query_generator = None self._first_q_session = True self._q_session_index = 0 self._query_index", "\"approx\": assert ( self.query_option_count == 2 ), \"Cannot use approximation", "model: rv_x = pm.Uniform( name=\"rv_x\", shape=self.dim_features, lower=-1, upper=1, testval=test_val, )", "Preferences. 
\"\"\" import itertools import os import time from pathlib", "][0] self.gen_scenario = self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters = self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon = self._dempref_agent_parameters[\"epsilon\"][0]", "# context: model = pm.Model() with model: # Add random-variable", "Generate self.num_queries number of queries. This function produces query options", "option :param w_samples: w_samples of w used to approximate the", "w_samples to draw. :param T: if greater than 1, all", "- rank] elif self.update_func == \"pick_best\": result, tmp = [phi[rank]", "(domain.w_dim, self.num_new_queries) ) for i, c in enumerate(controls_set): features_each_q_option[ :,", "= pd.DataFrame(columns=[\"run #\", \"pref_iter\", \"type\", \"value\"]) def initialize_weights(self, domain: Environment)", "objective(features: List, w_samples: np.ndarray) -> float: \"\"\" Maximize the volume", "with demonstrations. Note: We instantiate the agent according to arguments", "value) return np.min(volumes_removed) # The following optimization is w.r.t. volume", "np.linalg.norm(mean_w) var_w = np.var(self.w_samples, axis=0) # Make sure to properly", "MCMC): https://arxiv.org/abs/1111.4246. \"\"\" # Define update function: if self.update_func ==", "not None: self._sampler.clear_pref() self._sampler = self.DemPrefSampler( query_option_count=self.query_option_count, dim_features=self._w_dim, update_func=self.update_func, beta_demo=self.beta_demo,", "-tt.log( [ tt.sum( # sum down different feature-differences in a", "in selecting preferences \"\"\" self.query_option_count = query_option_count self.dim_features = dim_features", "generate_query( self, domain: Environment, query_state: int, curr_w: np.ndarray, verbose: bool", "float = 1.0, visualize: bool = False, ): \"\"\" Initialize", "inputs: :dom: the domain to generate queries on :num_queries: number", "preferences observed thus far. 
:param N: number of w_samples to", "= beta_pref self._visualize = visualize if self.update_func == \"approx\": assert", "tt.dot( self.phi_prefs[i][ j:, : ] - self.phi_prefs[i][j], distribution, ) )", "\"\"\" start = time.perf_counter() def func(controls: np.ndarray, *args) -> float:", "args: the first argument is the domain, and the second", "self.query_option_count, self.update_func, self.trajectory_length, self.incl_prev_query, self.gen_scenario, self.n_pref_iters, self.epsilon, self.n_samples_summ, self.n_samples_exp, self.beta_demo,", "tt.sum( # sum across different queries [ tt.sum( # sum", ") w_samples = np.array([r / np.linalg.norm(r) for r in all_samples])", "Inquire parlance. \"\"\" self.domain_name = self._dempref_agent_parameters[\"domain\"][0] self.teacher_type = self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos", "in enumerate(controls_set): features_each_q_option[ :, i ] = domain.features_from_trajectory( c, controls_as_input=True", "tt.sum( # sum across different terms in PL-update -tt.log( [", "self.beta_demo * tt.dot(self.phi_demos, distribution) ) return result elif self.update_func ==", "dempref dempref_agent.csv most recently created. \"\"\" data_path = Path.cwd() /", "self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count = self._dempref_agent_parameters[ \"query_option_count\" ][0] self.update_func = self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length", "all_csvs = all_files[ np.argwhere([f.suffix == \".csv\" for f in all_files])", "in raw_trajectories ] query_options_trajectories = [ Trajectory(raw_trajectories[i], raw_phis[i]) for i", "\"\"\" \"\"\"Define model for MCMC. 
NOTE the DemPref codebase creates", "self._q_session_index = 0 self._query_index = 0 self._w_dim = w_dim assert", "def __init__( self, weight_sample_count: int, trajectory_sample_count: int, trajectory_length: int, interaction_types:", "visualize=self._visualize, ) self.w_samples = self._sampler.sample(N=self.n_samples_summ) \"\"\"If we want to save", "w_samples drawn \"\"\" \"\"\"Define model for MCMC. NOTE the DemPref", "return np.min(volumes_removed) # The following optimization is w.r.t. volume removal;", "A time-descending .csv file index. e.g. if creation_index = 0,", "# Note the domain was reset w/ appropriate seed before", "self.update_func == \"rank\": def update_function(distribution): result = ( tt.sum( #", "self.opt_iter_count = self._dempref_agent_parameters[\"opt_iter_count\"][ 0 ] self.trim_start = self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count =", "\"\"\"Create an MCMC trace.\"\"\" # model accumulates the objects defined", "function, evaluated on the given queries' features \"\"\" # features:", "function produces query options that (locally) maximize the maximum volume", "by Integrating Human Demonstrations and Preferences. 
\"\"\" import itertools import", "(Only True if not using Dempref but using incl_prev_) :return:", "x feature_size exp_rewards = ( np.sum(np.dot(features, w_samples.T), axis=1) / w_samples.shape[0]", "as the unit ball centered at 0: def sphere(w): \"\"\"Determine", "self.gen_demos = self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count = self._dempref_agent_parameters[\"opt_iter_count\"][ 0 ] self.trim_start =", "in range(self.num_new_queries) ] features_each_q_option = np.zeros( (domain.w_dim, self.num_new_queries) ) for", "for r in all_samples]) return w_samples def get_trace(self, test_val: np.ndarray)", "agent parameters \"\"\" self._dempref_agent_parameters = self.read_param_csv(which_param_csv) \"\"\" Instance attributes from", "Choice in feedback: query_options = feedback[-1].choice.options choice = feedback[-1].choice.selection choice_index", "= [] if self.update_func == \"rank\": result = [None] *", "data_path = Path.cwd() / Path(\"../inquire/agents/\") # Sort the .csvs in", "num_expectation_samples must be \\ at least 1\" self.domain = dom", "feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T), axis=1) / w_samples.shape[0] )", "assert ( self.query_option_count == 2 ), \"Cannot use approximation to", "= [ x[1] for x in self.domain.control_bounds ] * self.trajectory_length", "os import time from pathlib import Path from typing import", "trajectory_length must be at least 1\" assert ( num_expectation_samples >=", ":param dim_features: Dimension of feature vectors. 
:param update_func: options are", "- i] ) # 1 x feature_size weighted_feature_diff = (", "query_options = self._query_generator.generate_query_options( self.w_samples ) query_diffs = [] for m", "self._sampler.load_prefs(phi, choice_index) self.w_samples = self._sampler.sample(N=self.n_samples_summ) # Return the new weights", "to continue\") az.plot_posterior(trace) plt.show() input(\"Press enter to continue\") all_samples =", "( self.update_func == \"rank\" or self.update_func == \"pick_best\" ): raise", "if self.update_func == \"approx\": assert ( self.query_option_count == 2 ),", "== \"pick_best\" ): raise Exception( update_func + \" is not", "== \"approx\": assert ( self.query_option_count == 2 ), \"Cannot use", "and preferences. Code adapted from Learning Reward Functions by Integrating", "generate_scenario # Currently must be False ) assert ( self.generate_scenario", "tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ), ) return result self.update_function", "List import arviz as az from inquire.agents.agent import Agent from", "curr_w: np.ndarray, verbose: bool = False, ) -> list: \"\"\"Generate", "query options that (locally) maximize the maximum volume removal objective.", "maximum volume removal objective. 
:param w_samples: Samples of w :param", "= df.to_dict() return params_dict def process_demonstrations( self, trajectories: list, domain:", "result = [None] * len(rank) for i in range(len(rank)): result[i]", "query_option_count must be 2; will throw an assertion error otherwise", "1): value *= 1.0 / np.sum( np.exp( self.beta_pref * (", "self.first_q_session: self.first_q_session = False else: self.q_session_index += 1 data =", "features: np.ndarray, w_samples: np.ndarray ) -> float: \"\"\" Approximate the", "= self._dempref_agent_parameters[\"n_samples_summ\"][ 0 ] self.n_samples_exp = self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo = self._dempref_agent_parameters[\"beta_demo\"][0]", "w_dim assert ( self.update_func == \"pick_best\" or self.update_func == \"approx\"", "= self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters = self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon = self._dempref_agent_parameters[\"epsilon\"][0] \"\"\" Instantiate", "trait of the DemPref codebase. z = self.trajectory_length * self.domain.control_size", "int, trajectory_length: int, interaction_types: list = [], w_dim: int =", ") # query_option_count x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T),", "= [] def sample(self, N: int, T: int = 1,", "of w, used to approximate the objective :return: the value", "== \"rank\" or self.update_func == \"pick_best\" ): raise Exception( update_func", "the feature values of each query :param w_samples: samples of", "are discarded :param burn: how many samples before the chain", "# Make sure to properly index data: if self.first_q_session: self.first_q_session", "preferences. 
Code adapted from Learning Reward Functions by Integrating Human", "= interaction_types self._visualize = visualize \"\"\" Get the pre-defined agent", "None self._first_q_session = True self._q_session_index = 0 self._query_index = 0", "# model accumulates the objects defined within the proceeding #", "volume removal objective. :param features: the feature values of each", "appropriate seed before beginning # this query session; domain.run(c) will", ") volumes_removed.append(v_removed) return np.min(volumes_removed) def rank_objective(features, w_samples) -> float: \"\"\"", "# tensor...to be added to the model logp\"(PyMC3 developer #", "np.min(volumes_removed) def rank_objective(features, w_samples) -> float: \"\"\" The ranking maximum", "will be used to approximate the objective function :return: the", "] if self.include_previous_query and not blank_traj: return [last_query_choice] + query_options_trajectories", "developer # guide). In this instance, the potential is effectively", "query if no demonstration is provided\" self.n_samples_summ = self._dempref_agent_parameters[\"n_samples_summ\"][ 0", "prior as the unit ball centered at 0: def sphere(w):", "/ np.linalg.norm(r) for r in all_samples]) return w_samples def get_trace(self,", "::inputs: ::current_weights: Irrelevant for DemPref; useful to other agents ::domain:", "query :generate_scenario: boolean for whether we want to generate the", ") ) ) for i in range(len(self.phi_prefs)) ] ) +", "self.phi_prefs = [] def load_demo(self, phi_demos: np.ndarray): \"\"\" Load the", "= pm.sample( 10000, tune=5000, return_inferencedata=True, init=\"adapt_diag\", ) # except: except", "Code adapted from original DemPref agent. \"\"\" def __init__( self,", "weights for gradient descent.\"\"\" self.reset() return self.w_samples def reset(self) ->", "BE USED WITH (incl_prev_QUERY AND NO DEMPREF). 
:param features: a", "update function.\" ) # feature vectors from demonstrated trajectories self.phi_demos", "for m in range(len(query_options)): for n in range(m): query_diffs.append( np.linalg.norm(", "Sampler. :param phi: a dictionary mapping rankings (0,...,query_option_count-1) to feature", "domain = args[0] w_samples = args[1] controls = np.array(controls) controls_set", "self.n_samples_summ = self._dempref_agent_parameters[\"n_samples_summ\"][ 0 ] self.n_samples_exp = self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo =", "the objective function, evaluated on the given queries' features \"\"\"", "queries' features \"\"\" # features: query_option_count x feature_size # w_samples:", "low=-1, high=1, size=self.dim_features ) test_value = test_value / np.linalg.norm(test_value) norm", "), \"Cannot generate scenario when using approximate gradients\" self.update_func =", "return np.array(mean_w, copy=True).reshape(1, -1) def read_param_csv(self, which_csv: int = 0)", "self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If we want to save data as they did", "be 2; will throw an assertion error otherwise :param beta_demo:", "reset w/ appropriate seed before beginning # this query session;", "not using Dempref but using incl_prev_) :return: a list of", "int, trajectory_sample_count: int, trajectory_length: int, interaction_types: list = [], w_dim:", "columns=[\"run #\", \"pref_iter\", \"type\", \"value\"] ), ignore_index=True, ) def generate_query(", "removal objective function. 
Note: This objective uses the Plackett-Luce model", "to a dictionary: chosen_csv = sorted_csvs[-which_csv] df = pd.read_csv(chosen_csv) params_dict", "# -np.inf, self.update_function(w), ) return result try: # Potential is", ":beta_pref: the rationality parameter for the teacher selecting her query", "of each query :num_expectation_samples: number of w_samples to use in", "0 self._w_dim = w_dim assert ( self.update_func == \"pick_best\" or", "in DemPref:\"\"\" mean_w = np.mean(self.w_samples, axis=0) mean_w = mean_w /", "def sample(self, N: int, T: int = 1, burn: int", "far. :param N: number of w_samples to draw. :param T:", "), \"QueryGenerator.__init__: trajectory_length must be at least 1\" assert (", "def objective(features: List, w_samples: np.ndarray) -> float: \"\"\" Maximize the", "def rank_objective(features, w_samples) -> float: \"\"\" The ranking maximum volume", "query generation. Note: this class generates queries using approx gradients.", "query_options[n].trajectory ) ) ) query_diff = max(query_diffs) query = Query(", "[] for m in range(len(query_options)): for n in range(m): query_diffs.append(", "elif self.update_func == \"rank\": def update_function(distribution): result = ( tt.sum(", "return result self.update_function = update_function while True: test_value = np.random.uniform(", "a dictionary mapping rankings (0,...,query_option_count-1) to feature vectors \"\"\" result", "copy=True).reshape(1, -1) def read_param_csv(self, which_csv: int = 0) -> dict:", "the NUTS sampling algorithm (an extension of Hamilitonian Monte Carlo", "self.query_option_count == 2 ), \"Cannot use approximation to update function", "= update_function while True: test_value = np.random.uniform( low=-1, high=1, size=self.dim_features", "domain.features_from_trajectory( query_options[m].trajectory ) - domain.features_from_trajectory( query_options[n].trajectory ) ) ) query_diff", "save data as they did in DemPref:\"\"\" mean_w = np.mean(self.w_samples,", "an 
agent-parameterization .csv. ::inputs: :creation_index: A time-descending .csv file index.", "in self.cleaned_demos] class DemPrefSampler: \"\"\"Sample trajectories for querying. Code adapted", "= opt.fmin_l_bfgs_b( func, x0=np.random.uniform( low=self.num_new_queries * lower_input_bound, high=self.num_new_queries * upper_input_bound,", "w/ appropriate seed before beginning # this query session; domain.run(c)", "self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos = self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos = self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count = self._dempref_agent_parameters[\"opt_iter_count\"][", "this difference is a trait of the DemPref codebase. z", "be used to approximate the objective function :return: the value", "return self.w_samples def reset(self) -> None: \"\"\"Prepare for new query", "self.dim_features)) # a list of np.arrays containing feature difference vectors", "10000, tune=5000, return_inferencedata=True, init=\"adapt_diag\", ) # except: except ( pm.SamplingError,", "= max(query_diffs) query = Query( query_type=Preference, task=None, start_state=query_state, trajectories=query_options, )", "the update_func used; the options are \"pick_best\", \"approx\", and \"rank\"", "= self._sampler.sample(N=self.n_samples_summ) \"\"\"If we want to save data as they", "self.incl_prev_query: self.all_query_choices = [d for d in self.cleaned_demos] class DemPrefSampler:", "* z] for i in range(self.num_new_queries) ] features_each_q_option = np.zeros(", "= self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If we want to save data as they", "int, curr_w: np.ndarray, verbose: bool = False, ) -> list:", "i = 1, 0 for i in range(len(rank) - 1):", ") if self.update_func == \"pick_best\": return -objective(features_each_q_option, w_samples) elif self.update_func", "of query_options # are epsilon apart: query_diff = 0 print(\"Generating", "import Query, Trajectory from 
inquire.interactions.modalities import Preference import matplotlib.pyplot as", "w_samples volumes_removed = [] rankings = itertools.permutations( list(range(self.num_queries)) ) #", "into the Sampler. :param demos: a Numpy array containing feature", "queries' features \"\"\" volumes_removed = [] for i in range(len(features)):", "proceeding # context: model = pm.Model() with model: # Add", "to query-option features; # load into sampler: features = [", "feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T)) / w_samples.shape[0] ) #", "] phi = {k: features[k] for k in range(len(query_options))} self._sampler.load_prefs(phi,", "DemPref:\"\"\" mean_w = np.mean(self.w_samples, axis=0) mean_w = mean_w / np.linalg.norm(mean_w)", "lower_input_bound, high=self.num_new_queries * upper_input_bound, size=(self.num_new_queries * z), ), args=(self.domain, w_samples),", "PL-update -tt.log( [ tt.sum( # sum down different feature-differences in", "( features[i] - features[1 - i] ) # 1 x", "self.update_func == \"approx\" or self.update_func == \"rank\" ), (\"Update\" \"", "func(controls: np.ndarray, *args) -> float: \"\"\"Minimize via L_BFGS. 
:param controls:", "by applying update_func on the demonstrations and preferences observed thus", "w_samples: np.ndarray, last_query_choice: Trajectory = None, blank_traj: bool = False,", "result elif self.update_func == \"rank\": def update_function(distribution): result = (", "opt_res[0][i * z : (i + 1) * z] for", "as pd import pymc3 as pm import pymc3.distributions.transforms as tr", "np.array([r / np.linalg.norm(r) for r in all_samples]) return w_samples def", "if feedback == []: # No feedback yet received return", "bool, generate_scenario: bool, update_func: str, beta_pref: float, ) -> None:", "the rationality parameter for the teacher selecting her query ::Inquire-specific", "Sort the .csvs in descending order by time of creation:", "and not blank_traj: return [last_query_choice] + query_options_trajectories else: return query_options_trajectories", "codebase adapts their model to PyMC3 version 3.11.2. We use", "and # which encode the ranking from the preference #", "\"\"\" self._sampler = None self._w_samples = None self._query_generator = None", "Path.cwd() / Path(\"../inquire/agents/\") # Sort the .csvs in descending order", "self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos = self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count = self._dempref_agent_parameters[\"opt_iter_count\"][ 0 ] self.trim_start", "sum down different feature-differences in a single term in PL-update", "DemPref-specific sampler and query generator: \"\"\" self._sampler = None self._w_samples", "options that (locally) maximize the maximum volume removal objective. :param", "objects defined within the proceeding # context: model = pm.Model()", "= ( np.sum(np.dot(feature_diff, w_samples.T), axis=1) / w_samples.shape[0] ) # query_option_count", "value *= 1.0 / np.sum( np.exp( self.beta_pref * ( np.array(exp_rewards_sorted[i:])", "-> None: \"\"\" Initialize the approx query generation. 
Note: this", "not None: break if self._visualize: az.plot_trace(trace) plt.show() input(\"Press enter to", "rationality parameter for the teacher selecting her query ::Inquire-specific inputs:", "The previously selected query. Only required if self.incl_prev_query is True", "az.plot_trace(trace) plt.show() input(\"Press enter to continue\") az.plot_energy(trace) plt.show() input(\"Press enter", "self.df = self.df.append( pd.DataFrame( data, columns=[\"run #\", \"pref_iter\", \"type\", \"value\"]", "this query session; domain.run(c) will thus reset to appropriate #", "), (\"Update\" \" function must be one of the provided", "Load the demonstrations into the Sampler. :param demos: a Numpy", "Load the results of a preference query into the Sampler.", "is defined by applying update_func on the demonstrations and preferences", "domain: Environment) -> np.ndarray: \"\"\"Randomly initialize weights for gradient descent.\"\"\"", "PL-update tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i][ j:, : ] -", "np.append( features_each_q_option, domain.features_from_trajectory(last_query_choice), axis=1, ) if self.update_func == \"pick_best\": return", "x0=np.random.uniform( low=self.num_new_queries * lower_input_bound, high=self.num_new_queries * upper_input_bound, size=(self.num_new_queries * z),", "[] for key in sorted(phi.keys()): if key != rank: tmp.append(phi[key]", ") def generate_query( self, domain: Environment, query_state: int, curr_w: np.ndarray,", "self.demos = trajectories phi_demos = [ domain.features_from_trajectory(x.trajectory) for x in", "error otherwise :param beta_demo: parameter measuring irrationality of teacher in", "function :return: the value of the objective function for the", "features[i] - features[1 - i] ) # 1 x feature_size", "the maximum volume removal objective. 
:param w_samples: Samples of w", "self.phi_prefs[i][ j:, : ] - self.phi_prefs[i][j], distribution, ) ) )", "\"approx\": result = phi[rank] - phi[1 - rank] elif self.update_func", "options are \"rank\", \"pick_best\", and \"approx\". To use \"approx\", query_option_count", "for i in range(len(features)): feature_diff = np.array( [f - features[i]", "weights from the samples: mean_w = np.mean(self.w_samples, axis=0) mean_w =", "float: \"\"\"Minimize via L_BFGS. :param controls: an array, concatenated to", "each demonstration; has dimension n_dem -by- self.dim_features \"\"\" self.phi_demos =", "opt_res = opt.fmin_l_bfgs_b( func, x0=np.random.uniform( low=self.num_new_queries * lower_input_bound, high=self.num_new_queries *", ") return query def update_weights( self, current_weights: np.ndarray, domain: Environment,", "self._dempref_agent_parameters[\"n_samples_summ\"][ 0 ] self.n_samples_exp = self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo = self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref", "import pymc3 as pm import pymc3.distributions.transforms as tr import scipy.optimize", "self.include_previous_query else self.num_queries ) def generate_query_options( self, w_samples: np.ndarray, last_query_choice:", "is effectively # the model's log-likelihood. 
p = pm.Potential(\"sphere\", sphere(rv_x))", "None: self._sampler.clear_pref() self._sampler = self.DemPrefSampler( query_option_count=self.query_option_count, dim_features=self._w_dim, update_func=self.update_func, beta_demo=self.beta_demo, beta_pref=self.beta_pref,", "the provided options\") if self.incl_prev_query and self.teacher_type == \"term\": assert", "w_samples) else: return -rank_objective(features_each_q_option, w_samples) def objective(features: List, w_samples: np.ndarray)", "-1) def read_param_csv(self, which_csv: int = 0) -> dict: \"\"\"Read", "0, \"var\", var_w], ] self.df = self.df.append( pd.DataFrame( data, columns=[\"run", "1, all samples except each T^{th} sample are discarded :param", "return result elif self.update_func == \"rank\": def update_function(distribution): result =", "seeded with demonstrations. Note: We instantiate the agent according to", "model = pm.Model() with model: # Add random-variable x to", "True :param blank_traj: True is last_query_choice is blank. (Only True", "containing the feature values of each query :param w_samples: samples", "to generate the scenario -- i.e., other agents' behavior :update_func:", "objective uses the Plackett-Luce model of teacher behavior. CANNOT BE", "start = time.perf_counter() def func(controls: np.ndarray, *args) -> float: \"\"\"Minimize", "log-likelihood. p = pm.Potential(\"sphere\", sphere(rv_x)) trace = pm.sample( 10000, tune=5000,", "self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon = self._dempref_agent_parameters[\"epsilon\"][0] \"\"\" Instantiate the DemPref-specific sampler and", "\"\"\" An agent which uses demonstrations and preferences. Code adapted", "scenario when using approximate gradients\" self.update_func = update_func self.beta_pref =", "of each query option :param w_samples: w_samples of w used", "weights. 
::inputs: ::current_weights: Irrelevant for DemPref; useful to other agents", "\"\"\"Randomly initialize weights for gradient descent.\"\"\" self.reset() return self.w_samples def", "task's environment ::feedback: A list of the human feedback received", "across w_samples v_removed = 1.0 - 1.0 / np.sum( np.exp(self.beta_pref", "trajectories: list, domain: Environment ) -> None: \"\"\"Generate demonstrations to", "query using approximate gradients. Code adapted from DemPref's ApproxQueryGenerator. \"\"\"", "pymc3 as pm import pymc3.distributions.transforms as tr import scipy.optimize as", "self.w_samples ) query_diffs = [] for m in range(len(query_options)): for", "2).sum() if norm <= 1: break # Get a sampling", "to save data as they did in DemPref:\"\"\" mean_w =", "max(query_diffs) query = Query( query_type=Preference, task=None, start_state=query_state, trajectories=query_options, ) return", "volume removal; the domain's # optimization is w.r.t. the linear", "on the given queries' features \"\"\" volumes_removed = [] for", "- phi[1 - rank] elif self.update_func == \"pick_best\": result, tmp", "test_value = test_value / np.linalg.norm(test_value) norm = (test_value ** 2).sum()", "time step :trajectory_length: the length of each query :num_expectation_samples: number", "for x in self.domain.control_bounds ] * self.trajectory_length opt_res = opt.fmin_l_bfgs_b(", "= np.zeros( (domain.w_dim, self.num_new_queries) ) for i, c in enumerate(controls_set):", "mapping rankings (0,...,query_option_count-1) to feature vectors \"\"\" result = []", "\"QueryGenerator.__init__: trajectory_length must be at least 1\" assert ( num_expectation_samples", "approximate gradients\" self.update_func = update_func self.beta_pref = beta_pref self.num_new_queries =", "* len(rank) for i in range(len(rank)): exp_rewards_sorted[rank[i]] = exp_rewards[i] value,", "must be False ) assert ( self.generate_scenario is False ),", "self.cleaned_demos = self.demos if self.incl_prev_query: 
self.all_query_choices = [d for d", "update_func: options are \"rank\", \"pick_best\", and \"approx\". To use \"approx\",", "query_options = self._query_generator.generate_query_options( self.w_samples, last_query_choice ) else: query_options = self._query_generator.generate_query_options(", "x in self.demos ] self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos = self.demos if self.incl_prev_query:", "return np.min(volumes_removed) def rank_objective(features, w_samples) -> float: \"\"\" The ranking", "self.beta_demo * tt.dot(self.phi_demos, distribution) ), ) return result self.update_function =", "!= rank: tmp.append(phi[key] - phi[rank]) result.extend(tmp) self.phi_prefs.append(np.array(result)) def clear_pref(self): \"\"\"Clear", "(and avoid Bad Initial Energy): while True: trace = self.get_trace(test_value)", "\"Cannot use approximation to update function if query_option_count > 2\"", "var_w = np.var(self.w_samples, axis=0) # Make sure to properly index", "= [Path(c) for c in sorted_csvs] # Select the indicated", "features = features.T volumes_removed = [] for i in range(len(features)):", "result = ( tt.sum( # sum across different queries [", "to what the the original paper's codebase designates as their", "key in sorted(phi.keys()): if key != rank: tmp.append(phi[key] - phi[rank])", "[ self.teacher_type, self.n_demos, self.trim_start, self.query_option_count, self.update_func, self.trajectory_length, self.incl_prev_query, self.gen_scenario, self.n_pref_iters,", "3.5; this codebase adapts their model to PyMC3 version 3.11.2.", ":update_func: the update_func used; the options are \"pick_best\", \"approx\", and", "= time.perf_counter() def func(controls: np.ndarray, *args) -> float: \"\"\"Minimize via", "self.all_query_choices[ self.random_scenario_index ] # Generate query_options while ensuring that features", "the most recent Choice in feedback: query_options = feedback[-1].choice.options choice", "epsilon apart: query_diff = 0 print(\"Generating 
query_options\") while query_diff <=", "NUTS sampling algorithm (an extension of Hamilitonian Monte Carlo MCMC):", "version 3.11.2. We use the NUTS sampling algorithm (an extension", "False, ): \"\"\" Initialize the sampler. :param query_option_count: Number of", "> features.shape[1]: features = features.T volumes_removed = [] for i", "Plackett-Luce model of teacher behavior. CANNOT BE USED WITH (incl_prev_QUERY", "- np.minimum( 1.0, np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed)", "controls: an array, concatenated to contain the control input for", "Monte Carlo MCMC): https://arxiv.org/abs/1111.4246. \"\"\" # Define update function: if", "# Define update function: if self.update_func == \"approx\": def update_function(distribution):", "z] for i in range(self.num_new_queries) ] end = time.perf_counter() print(f\"Finished", "is w.r.t. volume removal; the domain's # optimization is w.r.t.", "- 1.0 / np.sum( np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return", "useful to other agents ::domain: The task's environment ::feedback: A", "mean_w], [self.q_session_index, 0, \"var\", var_w], ] self.df = self.df.append( pd.DataFrame(", "if greater than 1, all samples except each T^{th} sample", "self.dim_features \"\"\" self.phi_demos = phi_demos def load_prefs(self, phi: Dict, rank):", "self.trajectory_length, approx_grad=True, ) query_options_controls = [ opt_res[0][i * z :", "as opt import theano.tensor as tt class DemPref(Agent): \"\"\"A preference-querying", "for querying. Code adapted from original DemPref agent. 
\"\"\" def", "last_query_choice.null: query_options = self._query_generator.generate_query_options( self.w_samples, blank_traj=True ) else: query_options =", "self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos = self.demos if self.incl_prev_query: self.all_query_choices = [d for", "update_func self.beta_demo = beta_demo self.beta_pref = beta_pref self._visualize = visualize", "seed the querying process.\"\"\" self.demos = trajectories phi_demos = [", "self.update_func == \"pick_best\" or self.update_func == \"approx\" or self.update_func ==", ") -> None: \"\"\" Initialize the approx query generation. Note:", ") ) ) volumes_removed.append(1 - value) return np.min(volumes_removed) # The", "data as they did in DemPref:\"\"\" self.first_q_session = True self.q_session_index", "created. \"\"\" data_path = Path.cwd() / Path(\"../inquire/agents/\") # Sort the", "# query_option_count x 1 -- summed across w_samples volumes_removed =", ") self.w_samples = self._sampler.sample(N=self.n_samples_summ) \"\"\"If we want to save data", "environment ::feedback: A list of the human feedback received to", "import pandas as pd import pymc3 as pm import pymc3.distributions.transforms", "w_samples def get_trace(self, test_val: np.ndarray) -> az.InferenceData: \"\"\"Create an MCMC", "sorted_csvs = [Path(c) for c in sorted_csvs] # Select the", "feature vectors from demonstrated trajectories self.phi_demos = np.zeros((1, self.dim_features)) #", "* len(rank) for i in range(len(rank)): result[i] = phi[rank[i]] elif", "self.phi_prefs = [] def sample(self, N: int, T: int =", "x 1 -- summed across w_samples v_removed = 1.0 -", "is provided\" self.n_samples_summ = self._dempref_agent_parameters[\"n_samples_summ\"][ 0 ] self.n_samples_exp = self._dempref_agent_parameters[\"n_samples_exp\"][0]", "queries is the previously selected query :generate_scenario: boolean for whether", "weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def 
approx_objective( features: np.ndarray, w_samples:", "* z), ), args=(self.domain, w_samples), bounds=self.domain.control_bounds * self.num_new_queries * self.trajectory_length,", "recent Choice in feedback: query_options = feedback[-1].choice.options choice = feedback[-1].choice.selection", "generate_scenario: bool, update_func: str, beta_pref: float, ) -> None: \"\"\"", "be \\ at least 1\" self.domain = dom self.num_queries =", "np.arrays containing feature difference vectors and # which encode the", "self._dempref_agent_parameters[\"domain\"][0] self.teacher_type = self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos = self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos = self._dempref_agent_parameters[\"gen_demos\"][0]", "Initialize the sampler. :param query_option_count: Number of queries. :param dim_features:", "a list containing the feature values of each query :param", "teacher in providing demonstrations :param beta_pref: parameter measuring irrationality of", "PyMC3 version 3.11.2. We use the NUTS sampling algorithm (an", "= self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos = self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos = self._dempref_agent_parameters[\"gen_demos\"][0] self.opt_iter_count =", "= [] rankings = itertools.permutations( list(range(self.num_queries)) ) # iterating over", "for j in range( self.query_option_count ) ] ) ) for", "\"value\"] ), ignore_index=True, ) def generate_query( self, domain: Environment, query_state:", "__init__( self, dom: Environment, num_queries: int, trajectory_length: int, num_expectation_samples: int,", "of the provided options\") if self.incl_prev_query and self.teacher_type == \"term\":", "function: if self.update_func == \"approx\": def update_function(distribution): result = tt.sum(", ") -> List[Trajectory]: \"\"\" Generate self.num_queries number of queries. 
This", "order by time of creation: all_files = np.array(list(Path.iterdir(data_path))) all_csvs =", "matplotlib.pyplot as plt import numpy as np import pandas as", ">= 1 ), \"QueryGenerator.__init__: trajectory_length must be at least 1\"", "approx_grad=True, ) query_options_controls = [ opt_res[0][i * z : (i", "self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length = self._dempref_agent_parameters[ \"trajectory_length\" ][0] self.incl_prev_query = self._dempref_agent_parameters[ \"incl_prev_query\"", "# Generate query_options while ensuring that features of query_options #", "\"term\": assert ( self.n_demos > 0 ), \"Cannot include previous", "= 1, burn: int = 1000) -> np.ndarray: \"\"\"Return N", "self, w_samples: np.ndarray, last_query_choice: Trajectory = None, blank_traj: bool =", ") ) ) for j in range( self.query_option_count ) ]", "the teacher selecting her query ::Inquire-specific inputs: :start_state: The state", "modified to be consist with the Inquire parlance. \"\"\" self.domain_name", "provided\" self.n_samples_summ = self._dempref_agent_parameters[\"n_samples_summ\"][ 0 ] self.n_samples_exp = self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo", "# Define the prior as the unit ball centered at", "range(len(features)): feature_diff = np.array( [f - features[i] for f in", "# state: raw_trajectories = [ self.domain.run(c) for c in query_options_controls", "summed across w_samples v_removed = 1.0 - np.minimum( 1.0, np.exp(self.beta_pref", "sum across different queries [ tt.sum( # sum across different", "queries. This function produces query options that (locally) maximize the", "= np.array([str(f[0]).strip() for f in all_csvs]) sorted_csvs = sorted(all_csvs, key=os.path.getmtime)", "is the samples that will be used to approximate the", "), args=(self.domain, w_samples), bounds=self.domain.control_bounds * self.num_new_queries * self.trajectory_length, approx_grad=True, )", "Carlo MCMC): https://arxiv.org/abs/1111.4246. 
\"\"\" # Define update function: if self.update_func", "x in self.domain.control_bounds ] * self.trajectory_length opt_res = opt.fmin_l_bfgs_b( func,", "args[1] controls = np.array(controls) controls_set = [ controls[i * z", "point. DemPref utilizes only the most recent \"\"\" if feedback", "self.query_index = 0 self.config = [ self.teacher_type, self.n_demos, self.trim_start, self.query_option_count,", "features: query_option_count x feature_size # w_samples: n_samples x feature_size exp_rewards", "= query_option_count self.dim_features = dim_features self.update_func = update_func self.beta_demo =", "the the original paper's codebase designates as their main experiment.", "[ tt.sum( # sum down different feature-differences in a single", "f in all_files]) ] all_csvs = np.array([str(f[0]).strip() for f in", "preference query into the Sampler. :param phi: a dictionary mapping", "objective function, evaluated on the given queries' features \"\"\" volumes_removed", "volume removal objective function. Note: This objective uses the Plackett-Luce", "dim_features self.update_func = update_func self.beta_demo = beta_demo self.beta_pref = beta_pref", "1) * z] for i in range(self.num_new_queries) ] features_each_q_option =", "self.teacher_type, self.n_demos, self.trim_start, self.query_option_count, self.update_func, self.trajectory_length, self.incl_prev_query, self.gen_scenario, self.n_pref_iters, self.epsilon,", "is True :param blank_traj: True is last_query_choice is blank. (Only", "To use \"approx\", query_option_count must be 2; will throw an", "plt.show() input(\"Press enter to continue\") all_samples = trace.sel( draw=slice(burn, None)", "last_query_choice is blank. (Only True if not using Dempref but", "np.min(volumes_removed) # The following optimization is w.r.t. 
volume removal; the", "0 last_query_choice = self.all_query_choices[ self.random_scenario_index ] # Generate query_options while", "it to a dictionary: chosen_csv = sorted_csvs[-which_csv] df = pd.read_csv(chosen_csv)", "self._query_generator.generate_query_options( self.w_samples, blank_traj=True ) else: query_options = self._query_generator.generate_query_options( self.w_samples, last_query_choice", "= self.DemPrefSampler( query_option_count=self.query_option_count, dim_features=self._w_dim, update_func=self.update_func, beta_demo=self.beta_demo, beta_pref=self.beta_pref, visualize=self._visualize, ) self.w_samples", "::inputs: :creation_index: A time-descending .csv file index. e.g. if creation_index", "z), ), args=(self.domain, w_samples), bounds=self.domain.control_bounds * self.num_new_queries * self.trajectory_length, approx_grad=True,", "we needn't maintain a domain's start state; that's handled in", "all_csvs]) sorted_csvs = sorted(all_csvs, key=os.path.getmtime) sorted_csvs = [Path(c) for c", "* tt.dot(self.phi_demos, distribution) ), ) return result self.update_function = update_function", "# this query session; domain.run(c) will thus reset to appropriate", "are \"pick_best\", \"approx\", and \"rank\" :beta_pref: the rationality parameter for", "in all_samples]) return w_samples def get_trace(self, test_val: np.ndarray) -> az.InferenceData:", "1: break # Get a sampling trace (and avoid Bad", "] ) + tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ) return", "model to PyMC3 version 3.11.2. 
We use the NUTS sampling", "range(len(features)): feature_diff = ( features[i] - features[1 - i] )", "in PL-update tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i][ j:, : ]", "\"pref_iter\", \"type\", \"value\"] ), ignore_index=True, ) def generate_query( self, domain:", "will thus reset to appropriate # state: raw_trajectories = [", "Make sure to properly index data: if self.first_q_session: self.first_q_session =", "input(\"Press enter to continue\") all_samples = trace.sel( draw=slice(burn, None) ).posterior.rv_x.values", "weight_sample_count: int, trajectory_sample_count: int, trajectory_length: int, interaction_types: list = [],", "self, trajectories: list, domain: Environment ) -> None: \"\"\"Generate demonstrations", "w is part of the unit ball.\"\"\" w_sum = pm.math.sqr(w).sum()", "one of the queries is the previously selected query :generate_scenario:", "pm.SamplingError, pm.parallel_sampling.ParallelSamplingError, ): return None return trace class DemPrefQueryGenerator: \"\"\"Generate", "objective function, evaluated on the given queries' features \"\"\" if", "query_options\") while query_diff <= self.epsilon: if self.incl_prev_query: if last_query_choice.null: query_options", ") ) for i in range(len(self.phi_prefs)) ] ) + tt.sum(", "of teacher in providing demonstrations :param beta_pref: parameter measuring irrationality", "The distribution is defined by applying update_func on the demonstrations", "rankings to query-option features; # load into sampler: features =", "not blank_traj: features_each_q_option = np.append( features_each_q_option, domain.features_from_trajectory(last_query_choice), axis=1, ) if", "behavior :update_func: the update_func used; the options are \"pick_best\", \"approx\",", "args=(self.domain, w_samples), bounds=self.domain.control_bounds * self.num_new_queries * self.trajectory_length, approx_grad=True, ) query_options_controls", "are discarded :return: list of w_samples drawn \"\"\" \"\"\"Define model", "is part of the unit 
ball.\"\"\" w_sum = pm.math.sqr(w).sum() result", "removal objective. :param w_samples: Samples of w :param last_query_choice: The", "\"\"\" if features.shape[0] > features.shape[1]: features = features.T volumes_removed =", "USED WITH (incl_prev_QUERY AND NO DEMPREF). :param features: a list", "None: self._query_generator = self.DemPrefQueryGenerator( dom=domain, num_queries=self.query_option_count, trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario,", "a single term in PL-update tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i][", "self.query_option_count = query_option_count self.dim_features = dim_features self.update_func = update_func self.beta_demo", "applying update_func on the demonstrations and preferences observed thus far.", "\"\"\"Minimize via L_BFGS. :param controls: an array, concatenated to contain", "self.beta_pref = beta_pref self._visualize = visualize if self.update_func == \"approx\":", "query_diffs = [] for m in range(len(query_options)): for n in", "Trajectory = None, blank_traj: bool = False, ) -> List[Trajectory]:", "feedback: query_options = feedback[-1].choice.options choice = feedback[-1].choice.selection choice_index = query_options.index(choice)", "the queries is the previously selected query :generate_scenario: boolean for", "self._dempref_agent_parameters[ \"incl_prev_query\" ][0] self.gen_scenario = self._dempref_agent_parameters[\"gen_scenario\"][0] self.n_pref_iters = self._dempref_agent_parameters[\"n_pref_iters\"][0] self.epsilon", "which_csv: int = 0) -> dict: \"\"\"Read an agent-parameterization .csv.", "self.num_queries = num_queries self.trajectory_length = trajectory_length self.num_expectation_samples = num_expectation_samples self.include_previous_query", "= update_func self.beta_pref = beta_pref self.num_new_queries = ( self.num_queries -", "orginal codebase's 'runner.py' object. 
Note that some variable names are", "self.domain_name = self._dempref_agent_parameters[\"domain\"][0] self.teacher_type = self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos = self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos", "data as they did in DemPref:\"\"\" mean_w = np.mean(self.w_samples, axis=0)", "sampler. :param query_option_count: Number of queries. :param dim_features: Dimension of", "in range(len(query_options))} self._sampler.load_prefs(phi, choice_index) self.w_samples = self._sampler.sample(N=self.n_samples_summ) # Return the", "a trajectory begins. \"\"\" assert ( num_queries >= 1 ),", "these initial samples are discarded :return: list of w_samples drawn", "to use in approximating the objective function :include_previous_query: boolean for", "DemPref(Agent): \"\"\"A preference-querying agent seeded with demonstrations. Note: We instantiate", "computing queries in {end - start}s\") # Note the domain", "= np.var(self.w_samples, axis=0) # Make sure to properly index data:", "= self._dempref_agent_parameters[\"epsilon\"][0] \"\"\" Instantiate the DemPref-specific sampler and query generator:", ":return: the value of the objective function for the given", "random-variable x to model: rv_x = pm.Uniform( name=\"rv_x\", shape=self.dim_features, lower=-1,", "last_query_choice ) else: query_options = self._query_generator.generate_query_options( self.w_samples ) query_diffs =", "\"var\", var_w], ] self.df = self.df.append( pd.DataFrame( data, columns=[\"run #\",", "WITH (incl_prev_QUERY AND NO DEMPREF). :param features: a list containing", "inputs: :start_state: The state from which a trajectory begins. 
\"\"\"", "reset(self) -> None: \"\"\"Prepare for new query session.\"\"\" if self._sampler", "\"\"\" # Define update function: if self.update_func == \"approx\": def", "at least 1\" assert ( trajectory_length >= 1 ), \"QueryGenerator.__init__:", "generate_scenario=self.gen_scenario, update_func=self.update_func, beta_pref=self.beta_pref, ) if self.incl_prev_query: if len(self.demos) > 0:", "{end - start}s\") # Note the domain was reset w/", "beta_pref=self.beta_pref, visualize=self._visualize, ) self.w_samples = self._sampler.sample(N=self.n_samples_summ) \"\"\"If we want to", "at 0: def sphere(w): \"\"\"Determine if w is part of", "self._query_index = 0 self._w_dim = w_dim assert ( self.update_func ==", "\"\"\" if feedback == []: # No feedback yet received", "than 1, all samples except each T^{th} sample are discarded", "num_queries >= 1 ), \"QueryGenerator.__init__: num_queries must be at least", "int, interaction_types: list = [], w_dim: int = 4, which_param_csv:", "= False else: self.q_session_index += 1 data = [ [self.q_session_index,", "agent seeded with demonstrations. 
Note: We instantiate the agent according", "( np.sum(np.dot(feature_diff, w_samples.T), axis=1) / w_samples.shape[0] ) # query_option_count x", "= mean_w / np.linalg.norm(mean_w) return np.array(mean_w, copy=True).reshape(1, -1) def read_param_csv(self,", "parameter measuring irrationality of teacher in providing demonstrations :param beta_pref:", "start}s\") # Note the domain was reset w/ appropriate seed", "query_options_controls = [ opt_res[0][i * z : (i + 1)", "feature difference vectors and # which encode the ranking from", "is not None: break if self._visualize: az.plot_trace(trace) plt.show() input(\"Press enter", "( np.sum(np.dot(feature_diff, w_samples.T)) / w_samples.shape[0] ) # 1 x 1", "np.linalg.norm( domain.features_from_trajectory( query_options[m].trajectory ) - domain.features_from_trajectory( query_options[n].trajectory ) ) )", "self.incl_prev_query, self.gen_scenario, self.n_pref_iters, self.epsilon, self.n_samples_summ, self.n_samples_exp, self.beta_demo, self.beta_pref, self.beta_teacher, ]", "state; that's handled in inquire/tests/evaluation.py and the respective domain. \"\"\"", "containing feature difference vectors and # which encode the ranking", "(locally) maximize the maximum volume removal objective. :param w_samples: Samples", "\"\"\" The ranking maximum volume removal objective function. Note: This", "ignore_index=True, ) def generate_query( self, domain: Environment, query_state: int, curr_w:", "self.update_func, self.trajectory_length, self.incl_prev_query, self.gen_scenario, self.n_pref_iters, self.epsilon, self.n_samples_summ, self.n_samples_exp, self.beta_demo, self.beta_pref,", "'runner.py' object. 
Note that some variable names are modified to", "True self._q_session_index = 0 self._query_index = 0 self._w_dim = w_dim", "\"approx\": return -approx_objective(features_each_q_option, w_samples) else: return -rank_objective(features_each_q_option, w_samples) def objective(features:", ") if self.incl_prev_query: if len(self.demos) > 0: self.random_scenario_index = np.random.randint(len(self.demos))", "# Return the new weights from the samples: mean_w =", "np.sum(np.dot(feature_diff, w_samples.T)) / w_samples.shape[0] ) # 1 x 1 --", "Learning Reward Functions by Integrating Human Demonstrations and Preferences. \"\"\"", "\"pick_best\": result, tmp = [phi[rank] - phi[rank]], [] for key", "self.phi_demos = np.zeros((1, self.dim_features)) # a list of np.arrays containing", "generate the scenario -- i.e., other agents' behavior :update_func: the", "convert it to a dictionary: chosen_csv = sorted_csvs[-which_csv] df =", "from inquire.agents.agent import Agent from inquire.environments.environment import Environment from inquire.interactions.feedback", "using Dempref but using incl_prev_) :return: a list of trajectories", "\"\"\" Instance attributes from orginal codebase's 'runner.py' object. 
Note that", "if self.include_previous_query and not blank_traj: features_each_q_option = np.append( features_each_q_option, domain.features_from_trajectory(last_query_choice),", "dom=domain, num_queries=self.query_option_count, trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario, update_func=self.update_func, beta_pref=self.beta_pref, ) if", "in self.domain.control_bounds ] * self.trajectory_length upper_input_bound = [ x[1] for", "in all_files]) ] all_csvs = np.array([str(f[0]).strip() for f in all_csvs])", "= tt.switch( pm.math.gt(w_sum, 1.0), -100, # -np.inf, self.update_function(w), ) return", "the proceeding # context: model = pm.Model() with model: #", "are modified to be consist with the Inquire parlance. \"\"\"", "given set of controls \"\"\" domain = args[0] w_samples =", "the samples that will be used to approximate the objective", "if self._visualize: az.plot_trace(trace) plt.show() input(\"Press enter to continue\") az.plot_energy(trace) plt.show()", "return trace class DemPrefQueryGenerator: \"\"\"Generate queries. 
Code adapted from original", "import Preference import matplotlib.pyplot as plt import numpy as np", "] self.df = self.df.append( pd.DataFrame( data, columns=[\"run #\", \"pref_iter\", \"type\",", "is not None: self._sampler.clear_pref() self._sampler = self.DemPrefSampler( query_option_count=self.query_option_count, dim_features=self._w_dim, update_func=self.update_func,", "in providing demonstrations :param beta_pref: parameter measuring irrationality of teacher", "np import pandas as pd import pymc3 as pm import", "[] if self.update_func == \"rank\": result = [None] * len(rank)", "mean_w = mean_w / np.linalg.norm(mean_w) return np.array(mean_w, copy=True).reshape(1, -1) def", "domain: Environment ) -> None: \"\"\"Generate demonstrations to seed the", "self.update_function(w), ) return result try: # Potential is a \"potential", "mean_w = np.mean(self.w_samples, axis=0) mean_w = mean_w / np.linalg.norm(mean_w) return", "= np.append( features_each_q_option, domain.features_from_trajectory(last_query_choice), axis=1, ) if self.update_func == \"pick_best\":", "def __init__( self, dom: Environment, num_queries: int, trajectory_length: int, num_expectation_samples:", "-> list: \"\"\"Generate query using approximate gradients. 
Code adapted from", "avoid Bad Initial Energy): while True: trace = self.get_trace(test_value) if", "time.perf_counter() print(f\"Finished computing queries in {end - start}s\") # Note", "self.DemPrefQueryGenerator( dom=domain, num_queries=self.query_option_count, trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario, update_func=self.update_func, beta_pref=self.beta_pref, )", "* weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def approx_objective( features: np.ndarray,", "demonstration; has dimension n_dem -by- self.dim_features \"\"\" self.phi_demos = phi_demos", "= False, ): \"\"\" Initialize the sampler. :param query_option_count: Number", "result = tt.sum( [ -tt.nnet.relu( -self.beta_pref * tt.dot(self.phi_prefs[i], distribution) )", "reset to appropriate # state: raw_trajectories = [ self.domain.run(c) for", "= self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo = self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref = self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher =", "demonstration is provided\" self.n_samples_summ = self._dempref_agent_parameters[\"n_samples_summ\"][ 0 ] self.n_samples_exp =", "to seed the querying process.\"\"\" self.demos = trajectories phi_demos =", "but using incl_prev_) :return: a list of trajectories (queries) \"\"\"", "num_queries must be at least 1\" assert ( trajectory_length >=", "start state; that's handled in inquire/tests/evaluation.py and the respective domain.", "= False, ): \"\"\"Initialize the agent. Note we needn't maintain", "[f - features[i] for f in features] ) # query_option_count", "for whether one of the queries is the previously selected", ") -> np.ndarray: \"\"\"Update the model's learned weights. ::inputs: ::current_weights:", "recently created. 
\"\"\" data_path = Path.cwd() / Path(\"../inquire/agents/\") # Sort", "\".csv\" for f in all_files]) ] all_csvs = np.array([str(f[0]).strip() for", "save data as they did in DemPref:\"\"\" self.first_q_session = True", "np.linalg.norm(test_value) norm = (test_value ** 2).sum() if norm <= 1:", "all_samples.shape[0] * all_samples.shape[1], -1 ) w_samples = np.array([r / np.linalg.norm(r)", "demonstrations and preferences observed thus far. :param N: number of", "options are \"pick_best\", \"approx\", and \"rank\" :beta_pref: the rationality parameter", "possible rankings for rank in rankings: exp_rewards_sorted = [None] *", "\"\"\" Initialize the sampler. :param query_option_count: Number of queries. :param", "int, update_func: str = \"approx\", beta_demo: float = 0.1, beta_pref:", "= tt.sum( [ -tt.nnet.relu( -self.beta_pref * tt.dot(self.phi_prefs[i], distribution) ) for", "as an \"additional # tensor...to be added to the model", "[]: # No feedback yet received return self.w_samples else: #", "be consist with the Inquire parlance. \"\"\" self.domain_name = self._dempref_agent_parameters[\"domain\"][0]", "added to the model logp\"(PyMC3 developer # guide). In this", "tt.switch( pm.math.gt(w_sum, 1.0), -100, # -np.inf, self.update_function(w), ) return result", "recent \"\"\" if feedback == []: # No feedback yet", "is w.r.t. the linear combination of weights and # features;", "codebase designates as their main experiment. \"\"\" def __init__( self,", "return -objective(features_each_q_option, w_samples) elif self.update_func == \"approx\": return -approx_objective(features_each_q_option, w_samples)", "uses the Plackett-Luce model of teacher behavior. 
CANNOT BE USED", "variable names are modified to be consist with the Inquire", "self.num_queries - 1 if self.include_previous_query else self.num_queries ) def generate_query_options(", "different terms in PL-update -tt.log( [ tt.sum( # sum down", "linear combination of weights and # features; this difference is", "::feedback: A list of the human feedback received to this", "maximize the maximum volume removal objective. :param w_samples: Samples of", "given queries' features \"\"\" if features.shape[0] > features.shape[1]: features =", "\"\"\" self._dempref_agent_parameters = self.read_param_csv(which_param_csv) \"\"\" Instance attributes from orginal codebase's", "np.array([str(f[0]).strip() for f in all_csvs]) sorted_csvs = sorted(all_csvs, key=os.path.getmtime) sorted_csvs", "Preference import matplotlib.pyplot as plt import numpy as np import", "range(len(self.phi_prefs)) ] ) + tt.sum( self.beta_demo * tt.dot(self.phi_demos, distribution) ),", "tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i][ j:, : ] - self.phi_prefs[i][j],", "= test_value / np.linalg.norm(test_value) norm = (test_value ** 2).sum() if", "sorted(phi.keys()): if key != rank: tmp.append(phi[key] - phi[rank]) result.extend(tmp) self.phi_prefs.append(np.array(result))", "vectors \"\"\" result = [] if self.update_func == \"rank\": result", "self.incl_prev_query: if last_query_choice.null: query_options = self._query_generator.generate_query_options( self.w_samples, blank_traj=True ) else:", "if self.update_func == \"pick_best\": return -objective(features_each_q_option, w_samples) elif self.update_func ==", "beta_demo: float = 0.1, beta_pref: float = 1.0, visualize: bool", "= self.get_trace(test_value) if trace is not None: break if self._visualize:", "self.beta_pref * tt.dot( self.phi_prefs[i][ j:, : ] - self.phi_prefs[i][j], distribution,", "self.q_session_index = 0 self.query_index = 0 self.config = [ self.teacher_type,", "the Sampler. 
:param demos: a Numpy array containing feature vectors", "function. Note: This objective uses the Plackett-Luce model of teacher", "\"\"\" result = [] if self.update_func == \"rank\": result =", "), ) return result self.update_function = update_function while True: test_value", "domain was reset w/ appropriate seed before beginning # this", "( np.array(exp_rewards_sorted[i:]) - exp_rewards_sorted[i] ) ) ) volumes_removed.append(1 - value)", "Numpy array containing feature vectors for each demonstration; has dimension", "this codebase adapts their model to PyMC3 version 3.11.2. We", "volume removal objective. :param w_samples: Samples of w :param last_query_choice:", "= np.array( [f - features[i] for f in features] )", "self, domain: Environment, query_state: int, curr_w: np.ndarray, verbose: bool =", "test_value = np.random.uniform( low=-1, high=1, size=self.dim_features ) test_value = test_value", "objective. :param w_samples: Samples of w :param last_query_choice: The previously", "features_each_q_option = np.append( features_each_q_option, domain.features_from_trajectory(last_query_choice), axis=1, ) if self.update_func ==", "query_diff = 0 print(\"Generating query_options\") while query_diff <= self.epsilon: if", "beta_demo: parameter measuring irrationality of teacher in providing demonstrations :param", "): \"\"\"Initialize the agent. Note we needn't maintain a domain's", "of trajectories (queries) \"\"\" start = time.perf_counter() def func(controls: np.ndarray,", "list containing the feature values of each query :param w_samples:", ":return: the value of the objective function, evaluated on the", "T: int = 1, burn: int = 1000) -> np.ndarray:", "optimization is w.r.t. 
volume removal; the domain's # optimization is", "\"pick_best\": def update_function(distribution): result = tt.sum( [ -tt.log( tt.sum( tt.exp(", "self.update_func == \"approx\": return -approx_objective(features_each_q_option, w_samples) else: return -rank_objective(features_each_q_option, w_samples)", "combination of weights and # features; this difference is a", "appropriate # state: raw_trajectories = [ self.domain.run(c) for c in", "-rank_objective(features_each_q_option, w_samples) def objective(features: List, w_samples: np.ndarray) -> float: \"\"\"", "int, num_expectation_samples: int, include_previous_query: bool, generate_scenario: bool, update_func: str, beta_pref:", "self._trajectory_length = trajectory_length self._interaction_types = interaction_types self._visualize = visualize \"\"\"", "DemPref codebase. z = self.trajectory_length * self.domain.control_size lower_input_bound = [", "if self.incl_prev_query and self.teacher_type == \"term\": assert ( self.n_demos >", "self.domain = dom self.num_queries = num_queries self.trajectory_length = trajectory_length self.num_expectation_samples", "while query_diff <= self.epsilon: if self.incl_prev_query: if last_query_choice.null: query_options =", "float: \"\"\" Approximate the maximum volume removal objective. :param features:", "approximation to update function if query_option_count > 2\" elif not", "of controls \"\"\" domain = args[0] w_samples = args[1] controls", "options\") if self.incl_prev_query and self.teacher_type == \"term\": assert ( self.n_demos", "0: def sphere(w): \"\"\"Determine if w is part of the", "the potential is effectively # the model's log-likelihood. 
p =", "# No feedback yet received return self.w_samples else: # Use", "self.domain.control_size lower_input_bound = [ x[0] for x in self.domain.control_bounds ]", "self._dempref_agent_parameters[\"n_samples_exp\"][0] self.beta_demo = self._dempref_agent_parameters[\"beta_demo\"][0] self.beta_pref = self._dempref_agent_parameters[\"beta_pref\"][0] self.beta_teacher = self._dempref_agent_parameters[\"beta_teacher\"][0]", "for k in range(len(query_options))} self._sampler.load_prefs(phi, choice_index) self.w_samples = self._sampler.sample(N=self.n_samples_summ) #", "adapts their model to PyMC3 version 3.11.2. We use the", "did in DemPref:\"\"\" mean_w = np.mean(self.w_samples, axis=0) mean_w = mean_w", "len(rank) for i in range(len(rank)): result[i] = phi[rank[i]] elif self.update_func", "an array, concatenated to contain the control input for all", "generates queries using approx gradients. ::original inputs: :dom: the domain", ":, i ] = domain.features_from_trajectory( c, controls_as_input=True ) if self.include_previous_query", "visualize \"\"\" Get the pre-defined agent parameters \"\"\" self._dempref_agent_parameters =", "beta_pref: parameter measuring irrationality of teacher in selecting preferences \"\"\"", "][0] self.update_func = self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length = self._dempref_agent_parameters[ \"trajectory_length\" ][0] self.incl_prev_query", "Note: this class generates queries using approx gradients. 
::original inputs:", "dim_features=self._w_dim, update_func=self.update_func, beta_demo=self.beta_demo, beta_pref=self.beta_pref, visualize=self._visualize, ) self.w_samples = self._sampler.sample(N=self.n_samples_summ) \"\"\"If", "query_options ] phi = {k: features[k] for k in range(len(query_options))}", "parameter for the teacher selecting her query ::Inquire-specific inputs: :start_state:", "or self.update_func == \"rank\" ), (\"Update\" \" function must be", "[ x[0] for x in self.domain.control_bounds ] * self.trajectory_length upper_input_bound", "values of each query option :param w_samples: w_samples of w", "context: model = pm.Model() with model: # Add random-variable x", "according to arguments corresponding to what the the original paper's", "= 1.0, visualize: bool = False, ): \"\"\" Initialize the", "to generate queries on :num_queries: number of queries to generate", "the domain to generate queries on :num_queries: number of queries", "1.0, np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def rank_objective(features,", "that features of query_options # are epsilon apart: query_diff =", "want to save data as they did in DemPref:\"\"\" self.first_q_session", ") - domain.features_from_trajectory( query_options[n].trajectory ) ) ) query_diff = max(query_diffs)", "0 for i in range(len(rank) - 1): value *= 1.0", "= self._sampler.sample(N=self.n_samples_summ) # Return the new weights from the samples:", "on the given queries' features \"\"\" # features: query_option_count x", "query def update_weights( self, current_weights: np.ndarray, domain: Environment, feedback: list", "query_diff = max(query_diffs) query = Query( query_type=Preference, task=None, start_state=query_state, trajectories=query_options,", "samples: mean_w = np.mean(self.w_samples, axis=0) mean_w = mean_w / np.linalg.norm(mean_w)", "= [None] * len(rank) for i in range(len(rank)): result[i] =", "beta_pref=self.beta_pref, ) if 
self.incl_prev_query: if len(self.demos) > 0: self.random_scenario_index =", "providing demonstrations :param beta_pref: parameter measuring irrationality of teacher in", "burn: int = 1000) -> np.ndarray: \"\"\"Return N samples from", "Human Demonstrations and Preferences. \"\"\" import itertools import os import", "the querying process.\"\"\" self.demos = trajectories phi_demos = [ domain.features_from_trajectory(x.trajectory)", ") -> None: \"\"\"Generate demonstrations to seed the querying process.\"\"\"", "input(\"Press enter to continue\") az.plot_energy(trace) plt.show() input(\"Press enter to continue\")", "list of trajectories (queries) \"\"\" start = time.perf_counter() def func(controls:", "* self.trajectory_length opt_res = opt.fmin_l_bfgs_b( func, x0=np.random.uniform( low=self.num_new_queries * lower_input_bound,", "time from pathlib import Path from typing import Dict, List", "distribution. The distribution is defined by applying update_func on the", "str = \"approx\", beta_demo: float = 0.1, beta_pref: float =", ":param beta_demo: parameter measuring irrationality of teacher in providing demonstrations", "use the dempref dempref_agent.csv most recently created. 
\"\"\" data_path =", "valid update function.\" ) # feature vectors from demonstrated trajectories", "query_option_count: int, dim_features: int, update_func: str = \"approx\", beta_demo: float", "= self._query_generator.generate_query_options( self.w_samples ) query_diffs = [] for m in", "has dimension n_dem -by- self.dim_features \"\"\" self.phi_demos = phi_demos def", "1.0 / np.sum( np.exp(self.beta_pref * weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed)", "# feature vectors from demonstrated trajectories self.phi_demos = np.zeros((1, self.dim_features))", "controls = np.array(controls) controls_set = [ controls[i * z :", "feedback[-1].choice.options choice = feedback[-1].choice.selection choice_index = query_options.index(choice) if self.incl_prev_query: self.all_query_choices[self.random_scenario_index]", "w.r.t. volume removal; the domain's # optimization is w.r.t. the", "if key != rank: tmp.append(phi[key] - phi[rank]) result.extend(tmp) self.phi_prefs.append(np.array(result)) def", "self, weight_sample_count: int, trajectory_sample_count: int, trajectory_length: int, interaction_types: list =", "to other agents ::domain: The task's environment ::feedback: A list", "last_query_choice: Trajectory = None, blank_traj: bool = False, ) ->", "state from which a trajectory begins. 
\"\"\" assert ( num_queries", "rank: tmp.append(phi[key] - phi[rank]) result.extend(tmp) self.phi_prefs.append(np.array(result)) def clear_pref(self): \"\"\"Clear all", "else: self.random_scenario_index = 0 last_query_choice = self.all_query_choices[ self.random_scenario_index ] #", "z = self.trajectory_length * self.domain.control_size lower_input_bound = [ x[0] for", "else self.num_queries ) def generate_query_options( self, w_samples: np.ndarray, last_query_choice: Trajectory", "# a list of np.arrays containing feature difference vectors and", "term in PL-update tt.exp( self.beta_pref * tt.dot( self.phi_prefs[i][ j:, :", "all_files[ np.argwhere([f.suffix == \".csv\" for f in all_files]) ] all_csvs", "1 -- summed across w_samples v_removed = 1.0 - 1.0", "controls \"\"\" domain = args[0] w_samples = args[1] controls =", "= all_files[ np.argwhere([f.suffix == \".csv\" for f in all_files]) ]", "(incl_prev_QUERY AND NO DEMPREF). :param features: a list containing the", "volumes_removed = [] for i in range(len(features)): feature_diff = np.array(", "feature_diff = np.array( [f - features[i] for f in features]", "0, visualize: bool = False, ): \"\"\"Initialize the agent. Note", "np.ndarray, *args) -> float: \"\"\"Minimize via L_BFGS. :param controls: an", "for c in query_options_controls ] raw_phis = [ self.domain.features_from_trajectory(t) for", "(an extension of Hamilitonian Monte Carlo MCMC): https://arxiv.org/abs/1111.4246. 
\"\"\" #", "import matplotlib.pyplot as plt import numpy as np import pandas", "assert ( num_expectation_samples >= 1 ), \"QueryGenerator.__init__: num_expectation_samples must be", "feature vectors \"\"\" result = [] if self.update_func == \"rank\":", "= beta_demo self.beta_pref = beta_pref self._visualize = visualize if self.update_func", "if self.incl_prev_query is True :param blank_traj: True is last_query_choice is", "else: return -rank_objective(features_each_q_option, w_samples) def objective(features: List, w_samples: np.ndarray) ->", "that will be used to approximate the objective function :return:", "1 x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T)) / w_samples.shape[0]", "generation. Note: this class generates queries using approx gradients. ::original", "== \"pick_best\": return -objective(features_each_q_option, w_samples) elif self.update_func == \"approx\": return", "x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T)) / w_samples.shape[0] )", "List[Trajectory]: \"\"\" Generate self.num_queries number of queries. This function produces", "encode the ranking from the preference # queries self.phi_prefs =", ".csvs in descending order by time of creation: all_files =", ":param args: the first argument is the domain, and the", "<= self.epsilon: if self.incl_prev_query: if last_query_choice.null: query_options = self._query_generator.generate_query_options( self.w_samples,", "): \"\"\" Initialize the sampler. 
:param query_option_count: Number of queries.", "Environment, feedback: list ) -> np.ndarray: \"\"\"Update the model's learned", "objective function for the given set of controls \"\"\" domain", "preferences \"\"\" self.query_option_count = query_option_count self.dim_features = dim_features self.update_func =", "received return self.w_samples else: # Use the most recent Choice", "] self._sampler.load_demo(np.array(phi_demos)) self.cleaned_demos = self.demos if self.incl_prev_query: self.all_query_choices = [d", "m in range(len(query_options)): for n in range(m): query_diffs.append( np.linalg.norm( domain.features_from_trajectory(", "self.include_previous_query and not blank_traj: features_each_q_option = np.append( features_each_q_option, domain.features_from_trajectory(last_query_choice), axis=1,", "to generate at each time step :trajectory_length: the length of", ":num_queries: number of queries to generate at each time step", "\"approx\", beta_demo: float = 0.1, beta_pref: float = 1.0, visualize:", "include_previous_query self.generate_scenario = ( generate_scenario # Currently must be False", "the previously selected query :generate_scenario: boolean for whether we want", "= query_options.index(choice) if self.incl_prev_query: self.all_query_choices[self.random_scenario_index] = choice # Create dictionary", "= update_func self.beta_demo = beta_demo self.beta_pref = beta_pref self._visualize =", "defined by applying update_func on the demonstrations and preferences observed", "numpy as np import pandas as pd import pymc3 as", "#\", \"pref_iter\", \"type\", \"value\"] ), ignore_index=True, ) def generate_query( self,", "range(len(query_options)): for n in range(m): query_diffs.append( np.linalg.norm( domain.features_from_trajectory( query_options[m].trajectory )", ") volumes_removed.append(v_removed) return np.min(volumes_removed) def approx_objective( features: np.ndarray, w_samples: np.ndarray", "features_each_q_option = np.zeros( (domain.w_dim, 
self.num_new_queries) ) for i, c in", "# 1 x 1 -- summed across w_samples v_removed =", "trajectory_length: int, interaction_types: list = [], w_dim: int = 4,", "== \"approx\" or self.update_func == \"rank\" ), (\"Update\" \" function", "\"\"\" Generate self.num_queries number of queries. This function produces query", "-by- self.dim_features \"\"\" self.phi_demos = phi_demos def load_prefs(self, phi: Dict,", "::original inputs: :dom: the domain to generate queries on :num_queries:", "rankings = itertools.permutations( list(range(self.num_queries)) ) # iterating over all possible", "generator: \"\"\" self._sampler = None self._w_samples = None self._query_generator =", "codebase. z = self.trajectory_length * self.domain.control_size lower_input_bound = [ x[0]", "( pm.SamplingError, pm.parallel_sampling.ParallelSamplingError, ): return None return trace class DemPrefQueryGenerator:", "1 x 1 -- summed across w_samples v_removed = 1.0", "adapted from DemPref's ApproxQueryGenerator. \"\"\" if self._query_generator is None: self._query_generator", "-- summed across w_samples v_removed = 1.0 - 1.0 /", "sampler.\"\"\" self.phi_prefs = [] def sample(self, N: int, T: int", "+ 1) * z] for i in range(self.num_new_queries) ] features_each_q_option", "result try: # Potential is a \"potential term\" defined as", "float, ) -> None: \"\"\" Initialize the approx query generation.", "np.var(self.w_samples, axis=0) # Make sure to properly index data: if", "at least 1\" assert ( num_expectation_samples >= 1 ), \"QueryGenerator.__init__:", "Approximate the maximum volume removal objective. 
:param features: the feature", "index data: if self.first_q_session: self.first_q_session = False else: self.q_session_index +=", "previously selected query :generate_scenario: boolean for whether we want to", "= self._dempref_agent_parameters[\"domain\"][0] self.teacher_type = self._dempref_agent_parameters[\"teacher_type\"][0] self.n_demos = self._dempref_agent_parameters[\"n_demos\"][0] self.gen_demos =", ") for i in range(len(self.phi_prefs)) ] ) + tt.sum( self.beta_demo", "= trace.sel( draw=slice(burn, None) ).posterior.rv_x.values all_samples = all_samples.reshape( all_samples.shape[0] *", "> 2\" elif not ( self.update_func == \"rank\" or self.update_func", "False, ): \"\"\"Initialize the agent. Note we needn't maintain a", "np.sum( np.exp( self.beta_pref * ( np.array(exp_rewards_sorted[i:]) - exp_rewards_sorted[i] ) )", "w_samples: samples of w, used to approximate the objective :return:", "-> float: \"\"\" Maximize the volume removal objective. :param features:", "for i in range(len(rank)): result[i] = phi[rank[i]] elif self.update_func ==", "the results of a preference query into the Sampler. :param", "The state from which a trajectory begins. \"\"\" assert (", "queries to generate at each time step :trajectory_length: the length", ":creation_index: A time-descending .csv file index. e.g. if creation_index =", "\"approx\": def update_function(distribution): result = tt.sum( [ -tt.nnet.relu( -self.beta_pref *", "beta_pref self.num_new_queries = ( self.num_queries - 1 if self.include_previous_query else", ") ) volumes_removed.append(1 - value) return np.min(volumes_removed) # The following", "= itertools.permutations( list(range(self.num_queries)) ) # iterating over all possible rankings", "MCMC. NOTE the DemPref codebase creates a sampler via PyMC3", "self.incl_prev_query: if len(self.demos) > 0: self.random_scenario_index = np.random.randint(len(self.demos)) else: self.random_scenario_index", "trajectory begins. 
\"\"\" assert ( num_queries >= 1 ), \"QueryGenerator.__init__:", "i.e., other agents' behavior :update_func: the update_func used; the options", "* tt.dot( self.phi_prefs[i], distribution ) ) ) ) for i", "The ranking maximum volume removal objective function. Note: This objective", "phi_demos def load_prefs(self, phi: Dict, rank): \"\"\" Load the results", "all possible rankings for rank in rankings: exp_rewards_sorted = [None]", "if self.incl_prev_query: self.all_query_choices[self.random_scenario_index] = choice # Create dictionary map from", "DemPref codebase creates a sampler via PyMC3 version 3.5; this", "np.mean(self.w_samples, axis=0) mean_w = mean_w / np.linalg.norm(mean_w) var_w = np.var(self.w_samples,", ":param burn: how many samples before the chain converges; these", "pm.Uniform( name=\"rv_x\", shape=self.dim_features, lower=-1, upper=1, testval=test_val, ) # Define the", "if len(self.demos) > 0: self.random_scenario_index = np.random.randint(len(self.demos)) else: self.random_scenario_index =", "= (test_value ** 2).sum() if norm <= 1: break #", "self._dempref_agent_parameters[\"opt_iter_count\"][ 0 ] self.trim_start = self._dempref_agent_parameters[\"trim_start\"][0] self.query_option_count = self._dempref_agent_parameters[ \"query_option_count\"", "size=(self.num_new_queries * z), ), args=(self.domain, w_samples), bounds=self.domain.control_bounds * self.num_new_queries *", "the given set of controls \"\"\" domain = args[0] w_samples", "domain.features_from_trajectory(x.trajectory) for x in query_options ] phi = {k: features[k]", "self.reset() return self.w_samples def reset(self) -> None: \"\"\"Prepare for new", "pathlib import Path from typing import Dict, List import arviz", "Dict, rank): \"\"\" Load the results of a preference query", "across w_samples v_removed = 1.0 - np.minimum( 1.0, np.exp(self.beta_pref *", "def reset(self) -> None: \"\"\"Prepare for new query session.\"\"\" if", "x feature_size weighted_feature_diff = ( 
np.sum(np.dot(feature_diff, w_samples.T), axis=1) / w_samples.shape[0]", "of queries to generate at each time step :trajectory_length: the", "self.df = pd.DataFrame(columns=[\"run #\", \"pref_iter\", \"type\", \"value\"]) def initialize_weights(self, domain:", "the prior as the unit ball centered at 0: def", "Dimension of feature vectors. :param update_func: options are \"rank\", \"pick_best\",", "include_previous_query: bool, generate_scenario: bool, update_func: str, beta_pref: float, ) ->", "high=1, size=self.dim_features ) test_value = test_value / np.linalg.norm(test_value) norm =", "\"\"\" Initialize the approx query generation. Note: this class generates", "tt.dot(self.phi_demos, distribution) ), ) return result self.update_function = update_function while", "# Select the indicated .csv and convert it to a", "): return None return trace class DemPrefQueryGenerator: \"\"\"Generate queries. Code", "def generate_query( self, domain: Environment, query_state: int, curr_w: np.ndarray, verbose:", ") # 1 x 1 -- summed across w_samples v_removed", "https://arxiv.org/abs/1111.4246. \"\"\" # Define update function: if self.update_func == \"approx\":", "i in range(len(self.phi_prefs)) ] ) + tt.sum( self.beta_demo * tt.dot(self.phi_demos,", "the control input for all queries :param args: the first", "self._visualize: az.plot_trace(trace) plt.show() input(\"Press enter to continue\") az.plot_energy(trace) plt.show() input(\"Press", "range(len(rank) - 1): value *= 1.0 / np.sum( np.exp( self.beta_pref", "in range(m): query_diffs.append( np.linalg.norm( domain.features_from_trajectory( query_options[m].trajectory ) - domain.features_from_trajectory( query_options[n].trajectory", "phi[rank[i]] elif self.update_func == \"approx\": result = phi[rank] - phi[1", "in a single term in PL-update tt.exp( self.beta_pref * tt.dot(", "( trajectory_length >= 1 ), \"QueryGenerator.__init__: trajectory_length must be at", "the original paper's codebase designates as their main experiment. 
\"\"\"", "return None return trace class DemPrefQueryGenerator: \"\"\"Generate queries. Code adapted", "input(\"Press enter to continue\") az.plot_posterior(trace) plt.show() input(\"Press enter to continue\")", "opt import theano.tensor as tt class DemPref(Agent): \"\"\"A preference-querying agent", "The task's environment ::feedback: A list of the human feedback", "[] for i in range(len(features)): feature_diff = np.array( [f -", "self.w_samples, last_query_choice ) else: query_options = self._query_generator.generate_query_options( self.w_samples ) query_diffs", "as tr import scipy.optimize as opt import theano.tensor as tt", "-> np.ndarray: \"\"\"Update the model's learned weights. ::inputs: ::current_weights: Irrelevant", "= ( generate_scenario # Currently must be False ) assert", "their main experiment. \"\"\" def __init__( self, weight_sample_count: int, trajectory_sample_count:", "demonstrated trajectories self.phi_demos = np.zeros((1, self.dim_features)) # a list of", "= 1000) -> np.ndarray: \"\"\"Return N samples from the distribution.", "3.11.2. We use the NUTS sampling algorithm (an extension of", "* tt.dot(self.phi_demos, distribution) ) return result elif self.update_func == \"pick_best\":", "self._dempref_agent_parameters = self.read_param_csv(which_param_csv) \"\"\" Instance attributes from orginal codebase's 'runner.py'", "dom self.num_queries = num_queries self.trajectory_length = trajectory_length self.num_expectation_samples = num_expectation_samples", "sampling trace (and avoid Bad Initial Energy): while True: trace", "params_dict def process_demonstrations( self, trajectories: list, domain: Environment ) ->", "the DemPref codebase. 
z = self.trajectory_length * self.domain.control_size lower_input_bound =", "which_param_csv: int = 0, visualize: bool = False, ): \"\"\"Initialize", "= self.all_query_choices[ self.random_scenario_index ] # Generate query_options while ensuring that", "== \"approx\": return -approx_objective(features_each_q_option, w_samples) else: return -rank_objective(features_each_q_option, w_samples) def", "w_samples.shape[0] ) # 1 x 1 -- summed across w_samples", ":return: a list of trajectories (queries) \"\"\" start = time.perf_counter()", "num_expectation_samples >= 1 ), \"QueryGenerator.__init__: num_expectation_samples must be \\ at", "( self.num_queries - 1 if self.include_previous_query else self.num_queries ) def", "if self._sampler is not None: self._sampler.clear_pref() self._sampler = self.DemPrefSampler( query_option_count=self.query_option_count,", "-> az.InferenceData: \"\"\"Create an MCMC trace.\"\"\" # model accumulates the", "-self.beta_pref * tt.dot(self.phi_prefs[i], distribution) ) for i in range(len(self.phi_prefs)) ]", "dom: Environment, num_queries: int, trajectory_length: int, num_expectation_samples: int, include_previous_query: bool,", "# 1 x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T)) /", "tt.dot(self.phi_prefs[i], distribution) ) for i in range(len(self.phi_prefs)) ] ) +", "Code adapted from DemPref's ApproxQueryGenerator. 
\"\"\" if self._query_generator is None:", "::domain: The task's environment ::feedback: A list of the human", "in range(len(rank) - 1): value *= 1.0 / np.sum( np.exp(", "a valid update function.\" ) # feature vectors from demonstrated", "num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario, update_func=self.update_func, beta_pref=self.beta_pref, ) if self.incl_prev_query: if len(self.demos)", "def sphere(w): \"\"\"Determine if w is part of the unit", "one of the provided options\") if self.incl_prev_query and self.teacher_type ==", "Demonstrations and Preferences. \"\"\" import itertools import os import time", "agent-parameterization .csv. ::inputs: :creation_index: A time-descending .csv file index. e.g.", "- start}s\") # Note the domain was reset w/ appropriate", "removal objective. :param features: the feature values of each query", "== \"pick_best\" or self.update_func == \"approx\" or self.update_func == \"rank\"", "self.domain.control_bounds ] * self.trajectory_length upper_input_bound = [ x[1] for x", "queries on :num_queries: number of queries to generate at each", "i] ) # 1 x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff,", "value, i = 1, 0 for i in range(len(rank) -", "contain the control input for all queries :param args: the", "different feature-differences in a single term in PL-update tt.exp( self.beta_pref", "an MCMC trace.\"\"\" # model accumulates the objects defined within", "\"\"\"Prepare for new query session.\"\"\" if self._sampler is not None:", "for whether we want to generate the scenario -- i.e.,", "rank] elif self.update_func == \"pick_best\": result, tmp = [phi[rank] -", ") # iterating over all possible rankings for rank in", "for DemPref; useful to other agents ::domain: The task's environment", "Reward Functions by Integrating Human Demonstrations and Preferences. 
\"\"\" import", "volumes_removed.append(v_removed) return np.min(volumes_removed) def rank_objective(features, w_samples) -> float: \"\"\" The", "bool = False, ): \"\"\" Initialize the sampler. :param query_option_count:", "each T^{th} sample are discarded :param burn: how many samples", "class DemPref(Agent): \"\"\"A preference-querying agent seeded with demonstrations. Note: We", "return params_dict def process_demonstrations( self, trajectories: list, domain: Environment )", "must be at least 1\" assert ( num_expectation_samples >= 1", "(i + 1) * z] for i in range(self.num_new_queries) ]", "Generate query_options while ensuring that features of query_options # are", "self._visualize = visualize if self.update_func == \"approx\": assert ( self.query_option_count", "volumes_removed = [] rankings = itertools.permutations( list(range(self.num_queries)) ) # iterating", "the distribution. The distribution is defined by applying update_func on", "/ np.linalg.norm(mean_w) return np.array(mean_w, copy=True).reshape(1, -1) def read_param_csv(self, which_csv: int", "Bad Initial Energy): while True: trace = self.get_trace(test_value) if trace", "model accumulates the objects defined within the proceeding # context:", "p = pm.Potential(\"sphere\", sphere(rv_x)) trace = pm.sample( 10000, tune=5000, return_inferencedata=True,", "of w_samples to use in approximating the objective function :include_previous_query:", "num_expectation_samples self.include_previous_query = include_previous_query self.generate_scenario = ( generate_scenario # Currently", "* z : (i + 1) * z] for i", "if no demonstration is provided\" self.n_samples_summ = self._dempref_agent_parameters[\"n_samples_summ\"][ 0 ]", "# sum across different queries [ tt.sum( # sum across", "i, c in enumerate(controls_set): features_each_q_option[ :, i ] = domain.features_from_trajectory(", "adapted from Learning Reward Functions by Integrating Human Demonstrations and", "function.\" ) # feature vectors from demonstrated 
trajectories self.phi_demos =", "enter to continue\") all_samples = trace.sel( draw=slice(burn, None) ).posterior.rv_x.values all_samples", "> 0: self.random_scenario_index = np.random.randint(len(self.demos)) else: self.random_scenario_index = 0 last_query_choice", "*= 1.0 / np.sum( np.exp( self.beta_pref * ( np.array(exp_rewards_sorted[i:]) -", "weight_sample_count self._trajectory_sample_count = trajectory_sample_count self._trajectory_length = trajectory_length self._interaction_types = interaction_types", "== \"rank\": def update_function(distribution): result = ( tt.sum( # sum", "= [ [self.q_session_index, 0, \"mean\", mean_w], [self.q_session_index, 0, \"var\", var_w],", "trajectories self.phi_demos = np.zeros((1, self.dim_features)) # a list of np.arrays", "n_samples x feature_size exp_rewards = ( np.sum(np.dot(features, w_samples.T), axis=1) /", "= pm.math.sqr(w).sum() result = tt.switch( pm.math.gt(w_sum, 1.0), -100, # -np.inf,", "w_samples v_removed = 1.0 - np.minimum( 1.0, np.exp(self.beta_pref * weighted_feature_diff)", "else: self.q_session_index += 1 data = [ [self.q_session_index, 0, \"mean\",", "to approximate the objective function :return: the value of the", "self.get_trace(test_value) if trace is not None: break if self._visualize: az.plot_trace(trace)", "the linear combination of weights and # features; this difference", "in self.domain.control_bounds ] * self.trajectory_length opt_res = opt.fmin_l_bfgs_b( func, x0=np.random.uniform(", "trace.\"\"\" # model accumulates the objects defined within the proceeding", "tmp = [phi[rank] - phi[rank]], [] for key in sorted(phi.keys()):", "self.update_func = update_func self.beta_demo = beta_demo self.beta_pref = beta_pref self._visualize", "int, dim_features: int, update_func: str = \"approx\", beta_demo: float =", "phi[rank]) result.extend(tmp) self.phi_prefs.append(np.array(result)) def clear_pref(self): \"\"\"Clear all preference information from", "import arviz as az from inquire.agents.agent import 
Agent from inquire.environments.environment", "= pm.Uniform( name=\"rv_x\", shape=self.dim_features, lower=-1, upper=1, testval=test_val, ) # Define", "\"rank\" :beta_pref: the rationality parameter for the teacher selecting her", "removal; the domain's # optimization is w.r.t. the linear combination", "self.beta_demo = beta_demo self.beta_pref = beta_pref self._visualize = visualize if", "self.first_q_session = True self.q_session_index = 0 self.query_index = 0 self.config", "i in range(len(rank) - 1): value *= 1.0 / np.sum(", "trajectory_sample_count: int, trajectory_length: int, interaction_types: list = [], w_dim: int", "using incl_prev_) :return: a list of trajectories (queries) \"\"\" start", "task=None, start_state=query_state, trajectories=query_options, ) return query def update_weights( self, current_weights:", "trajectory_length self._interaction_types = interaction_types self._visualize = visualize \"\"\" Get the", "\"additional # tensor...to be added to the model logp\"(PyMC3 developer", "the agent according to arguments corresponding to what the the", "= None self._query_generator = None self._first_q_session = True self._q_session_index =", "be added to the model logp\"(PyMC3 developer # guide). In", "exp_rewards_sorted[i] ) ) ) volumes_removed.append(1 - value) return np.min(volumes_removed) #", "-1 ) w_samples = np.array([r / np.linalg.norm(r) for r in", "accumulates the objects defined within the proceeding # context: model", "= ( np.sum(np.dot(features, w_samples.T), axis=1) / w_samples.shape[0] ) # query_option_count", "= 0.1, beta_pref: float = 1.0, visualize: bool = False,", "int = 0) -> dict: \"\"\"Read an agent-parameterization .csv. ::inputs:", ":param blank_traj: True is last_query_choice is blank. (Only True if", "Environment, query_state: int, curr_w: np.ndarray, verbose: bool = False, )", "] * self.trajectory_length opt_res = opt.fmin_l_bfgs_b( func, x0=np.random.uniform( low=self.num_new_queries *", "Number of queries. 
:param dim_features: Dimension of feature vectors. :param", "their model to PyMC3 version 3.11.2. We use the NUTS", "inquire.interactions.feedback import Query, Trajectory from inquire.interactions.modalities import Preference import matplotlib.pyplot", "query_type=Preference, task=None, start_state=query_state, trajectories=query_options, ) return query def update_weights( self,", "function must be one of the provided options\") if self.incl_prev_query", "was reset w/ appropriate seed before beginning # this query", "exp_rewards[i] value, i = 1, 0 for i in range(len(rank)", "apart: query_diff = 0 print(\"Generating query_options\") while query_diff <= self.epsilon:", "objective function :return: the value of the objective function for", "( generate_scenario # Currently must be False ) assert (", "results of a preference query into the Sampler. :param phi:", "self.demos if self.incl_prev_query: self.all_query_choices = [d for d in self.cleaned_demos]", "= self._dempref_agent_parameters[\"update_func\"][0] self.trajectory_length = self._dempref_agent_parameters[ \"trajectory_length\" ][0] self.incl_prev_query = self._dempref_agent_parameters[", "tt.dot( self.phi_prefs[i], distribution ) ) ) ) for i in", "az.plot_posterior(trace) plt.show() input(\"Press enter to continue\") all_samples = trace.sel( draw=slice(burn,", "# guide). In this instance, the potential is effectively #", "\"\"\"Update the model's learned weights. ::inputs: ::current_weights: Irrelevant for DemPref;", "break # Get a sampling trace (and avoid Bad Initial", "model's log-likelihood. 
p = pm.Potential(\"sphere\", sphere(rv_x)) trace = pm.sample( 10000,", "import Path from typing import Dict, List import arviz as", "== \"approx\": def update_function(distribution): result = tt.sum( [ -tt.nnet.relu( -self.beta_pref", "= Query( query_type=Preference, task=None, start_state=query_state, trajectories=query_options, ) return query def", "= time.perf_counter() print(f\"Finished computing queries in {end - start}s\") #", "num_queries=self.query_option_count, trajectory_length=self.trajectory_length, num_expectation_samples=self.n_samples_exp, include_previous_query=self.incl_prev_query, generate_scenario=self.gen_scenario, update_func=self.update_func, beta_pref=self.beta_pref, ) if self.incl_prev_query:", "np.ndarray: \"\"\"Randomly initialize weights for gradient descent.\"\"\" self.reset() return self.w_samples", "self.random_scenario_index = np.random.randint(len(self.demos)) else: self.random_scenario_index = 0 last_query_choice = self.all_query_choices[", "the Inquire parlance. \"\"\" self.domain_name = self._dempref_agent_parameters[\"domain\"][0] self.teacher_type = self._dempref_agent_parameters[\"teacher_type\"][0]", "\"Cannot generate scenario when using approximate gradients\" self.update_func = update_func", "an \"additional # tensor...to be added to the model logp\"(PyMC3", "control input for all queries :param args: the first argument", "sure to properly index data: if self.first_q_session: self.first_q_session = False", "pd.read_csv(chosen_csv) params_dict = df.to_dict() return params_dict def process_demonstrations( self, trajectories:", "maximum volume removal objective. 
:param features: the feature values of", "phi_demos = [ domain.features_from_trajectory(x.trajectory) for x in self.demos ] self._sampler.load_demo(np.array(phi_demos))", "np.zeros( (domain.w_dim, self.num_new_queries) ) for i, c in enumerate(controls_set): features_each_q_option[", "value of the objective function, evaluated on the given queries'", "self.num_queries number of queries. This function produces query options that", "self.df.append( pd.DataFrame( data, columns=[\"run #\", \"pref_iter\", \"type\", \"value\"] ), ignore_index=True,", "w_samples v_removed = 1.0 - 1.0 / np.sum( np.exp(self.beta_pref *", "= self.read_param_csv(which_param_csv) \"\"\" Instance attributes from orginal codebase's 'runner.py' object.", "if self.first_q_session: self.first_q_session = False else: self.q_session_index += 1 data", "[] def sample(self, N: int, T: int = 1, burn:", "self, query_option_count: int, dim_features: int, update_func: str = \"approx\", beta_demo:", "[None] * len(rank) for i in range(len(rank)): exp_rewards_sorted[rank[i]] = exp_rewards[i]", "generate queries on :num_queries: number of queries to generate at", "rankings (0,...,query_option_count-1) to feature vectors \"\"\" result = [] if", "az.InferenceData: \"\"\"Create an MCMC trace.\"\"\" # model accumulates the objects", "Return the new weights from the samples: mean_w = np.mean(self.w_samples,", "( self.generate_scenario is False ), \"Cannot generate scenario when using", "weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def rank_objective(features, w_samples) -> float:", "demos: a Numpy array containing feature vectors for each demonstration;", "\"trajectory_length\" ][0] self.incl_prev_query = self._dempref_agent_parameters[ \"incl_prev_query\" ][0] self.gen_scenario = self._dempref_agent_parameters[\"gen_scenario\"][0]", "self.beta_teacher = self._dempref_agent_parameters[\"beta_teacher\"][0] \"\"\"If we want to save data as", "(0,...,query_option_count-1) to 
feature vectors \"\"\" result = [] if self.update_func", "else: query_options = self._query_generator.generate_query_options( self.w_samples, last_query_choice ) else: query_options =", "\"pick_best\", \"approx\", and \"rank\" :beta_pref: the rationality parameter for the", "gradient descent.\"\"\" self.reset() return self.w_samples def reset(self) -> None: \"\"\"Prepare", "== []: # No feedback yet received return self.w_samples else:", "self.n_demos, self.trim_start, self.query_option_count, self.update_func, self.trajectory_length, self.incl_prev_query, self.gen_scenario, self.n_pref_iters, self.epsilon, self.n_samples_summ,", "w_samples: Samples of w :param last_query_choice: The previously selected query.", "0.1, beta_pref: float = 1.0, visualize: bool = False, ):", "pm import pymc3.distributions.transforms as tr import scipy.optimize as opt import", "self.first_q_session = False else: self.q_session_index += 1 data = [", "self.trajectory_length * self.domain.control_size lower_input_bound = [ x[0] for x in", "* self.domain.control_size lower_input_bound = [ x[0] for x in self.domain.control_bounds", "] # Generate query_options while ensuring that features of query_options", "= Path.cwd() / Path(\"../inquire/agents/\") # Sort the .csvs in descending", "w used to approximate the objective :return: the value of", "self._sampler is not None: self._sampler.clear_pref() self._sampler = self.DemPrefSampler( query_option_count=self.query_option_count, dim_features=self._w_dim,", "result[i] = phi[rank[i]] elif self.update_func == \"approx\": result = phi[rank]", "for d in self.cleaned_demos] class DemPrefSampler: \"\"\"Sample trajectories for querying.", ") ) for j in range( self.query_option_count ) ] )", "Trajectory from inquire.interactions.modalities import Preference import matplotlib.pyplot as plt import", "self.update_func == \"rank\": result = [None] * len(rank) for i", "query_options while ensuring that features of query_options # are epsilon", 
"query_option_count x feature_size weighted_feature_diff = ( np.sum(np.dot(feature_diff, w_samples.T), axis=1) /", "if self.incl_prev_query: if len(self.demos) > 0: self.random_scenario_index = np.random.randint(len(self.demos)) else:", "new query session.\"\"\" if self._sampler is not None: self._sampler.clear_pref() self._sampler", "the pre-defined agent parameters \"\"\" self._dempref_agent_parameters = self.read_param_csv(which_param_csv) \"\"\" Instance", "0, \"mean\", mean_w], [self.q_session_index, 0, \"var\", var_w], ] self.df =", "number of w_samples to use in approximating the objective function", "Currently must be False ) assert ( self.generate_scenario is False", "mean_w = mean_w / np.linalg.norm(mean_w) var_w = np.var(self.w_samples, axis=0) #", "other agents' behavior :update_func: the update_func used; the options are", "# sum across different terms in PL-update -tt.log( [ tt.sum(", "agent which uses demonstrations and preferences. Code adapted from Learning", "query_option_count x 1 -- summed across w_samples v_removed = 1.0", "\"\"\"Clear all preference information from the sampler.\"\"\" self.phi_prefs = []", ":generate_scenario: boolean for whether we want to generate the scenario", "and the second is the samples that will be used", "initial samples are discarded :return: list of w_samples drawn \"\"\"", "w_samples: np.ndarray) -> float: \"\"\" Maximize the volume removal objective.", "\"\"\" # features: query_option_count x feature_size # w_samples: n_samples x", "in PL-update -tt.log( [ tt.sum( # sum down different feature-differences", "domain's start state; that's handled in inquire/tests/evaluation.py and the respective", "the options are \"pick_best\", \"approx\", and \"rank\" :beta_pref: the rationality", "self.all_query_choices = [d for d in self.cleaned_demos] class DemPrefSampler: \"\"\"Sample", "query_option_count self.dim_features = dim_features self.update_func = update_func self.beta_demo = beta_demo", "np.exp( self.beta_pref * ( 
np.array(exp_rewards_sorted[i:]) - exp_rewards_sorted[i] ) ) )", "the agent. Note we needn't maintain a domain's start state;", "to continue\") all_samples = trace.sel( draw=slice(burn, None) ).posterior.rv_x.values all_samples =", "Note we needn't maintain a domain's start state; that's handled", "= w_dim assert ( self.update_func == \"pick_best\" or self.update_func ==", "= {k: features[k] for k in range(len(query_options))} self._sampler.load_prefs(phi, choice_index) self.w_samples", "enumerate(controls_set): features_each_q_option[ :, i ] = domain.features_from_trajectory( c, controls_as_input=True )", "the model's log-likelihood. p = pm.Potential(\"sphere\", sphere(rv_x)) trace = pm.sample(", "self._visualize = visualize \"\"\" Get the pre-defined agent parameters \"\"\"", "query_options = self._query_generator.generate_query_options( self.w_samples, blank_traj=True ) else: query_options = self._query_generator.generate_query_options(", "features; # load into sampler: features = [ domain.features_from_trajectory(x.trajectory) for", "* weighted_feature_diff) ) volumes_removed.append(v_removed) return np.min(volumes_removed) def rank_objective(features, w_samples) ->", "theano.tensor as tt class DemPref(Agent): \"\"\"A preference-querying agent seeded with", "self.teacher_type == \"term\": assert ( self.n_demos > 0 ), \"Cannot", "[None] * len(rank) for i in range(len(rank)): result[i] = phi[rank[i]]", "controls[i * z : (i + 1) * z] for", ":param w_samples: samples of w, used to approximate the objective", "that some variable names are modified to be consist with", "the objective :return: the value of the objective function, evaluated", "before the chain converges; these initial samples are discarded :return:", "approx query generation. 
Note: this class generates queries using approx", "We instantiate the agent according to arguments corresponding to what", "] self.df = pd.DataFrame(columns=[\"run #\", \"pref_iter\", \"type\", \"value\"]) def initialize_weights(self,", "x to model: rv_x = pm.Uniform( name=\"rv_x\", shape=self.dim_features, lower=-1, upper=1,", "of the human feedback received to this point. DemPref utilizes", "selecting preferences \"\"\" self.query_option_count = query_option_count self.dim_features = dim_features self.update_func", "f in features] ) # query_option_count x feature_size weighted_feature_diff =", "queries :param args: the first argument is the domain, and", "as their main experiment. \"\"\" def __init__( self, weight_sample_count: int,", "the unit ball.\"\"\" w_sum = pm.math.sqr(w).sum() result = tt.switch( pm.math.gt(w_sum,", "domain.features_from_trajectory( c, controls_as_input=True ) if self.include_previous_query and not blank_traj: features_each_q_option", "( self.update_func == \"pick_best\" or self.update_func == \"approx\" or self.update_func", "def approx_objective( features: np.ndarray, w_samples: np.ndarray ) -> float: \"\"\"", "ball.\"\"\" w_sum = pm.math.sqr(w).sum() result = tt.switch( pm.math.gt(w_sum, 1.0), -100,", "self.random_scenario_index = 0 last_query_choice = self.all_query_choices[ self.random_scenario_index ] # Generate", ") # except: except ( pm.SamplingError, pm.parallel_sampling.ParallelSamplingError, ): return None", "= [ x[0] for x in self.domain.control_bounds ] * self.trajectory_length", "[ Trajectory(raw_trajectories[i], raw_phis[i]) for i in range(len(raw_trajectories)) ] if self.include_previous_query", "w_samples.shape[0] ) # query_option_count x 1 -- summed across w_samples", "or self.update_func == \"approx\" or self.update_func == \"rank\" ), (\"Update\"", "update_function(distribution): result = tt.sum( [ -tt.log( tt.sum( tt.exp( self.beta_pref *", "/ np.linalg.norm(test_value) norm = (test_value ** 2).sum() if norm <=", 
"objective. :param features: a list containing the feature values of", "range(self.num_new_queries) ] features_each_q_option = np.zeros( (domain.w_dim, self.num_new_queries) ) for i,", "self._query_generator.generate_query_options( self.w_samples, last_query_choice ) else: query_options = self._query_generator.generate_query_options( self.w_samples )", "for f in features] ) # query_option_count x feature_size weighted_feature_diff", ">= 1 ), \"QueryGenerator.__init__: num_queries must be at least 1\"", "from the preference # queries self.phi_prefs = [] def load_demo(self,", "def generate_query_options( self, w_samples: np.ndarray, last_query_choice: Trajectory = None, blank_traj:", "self.num_expectation_samples = num_expectation_samples self.include_previous_query = include_previous_query self.generate_scenario = ( generate_scenario", "True if not using Dempref but using incl_prev_) :return: a", "how many samples before the chain converges; these initial samples", "trajectory_length self.num_expectation_samples = num_expectation_samples self.include_previous_query = include_previous_query self.generate_scenario = (", "\"\"\" Maximize the volume removal objective. :param features: a list", "w :param last_query_choice: The previously selected query. Only required if", "# iterating over all possible rankings for rank in rankings:", "x[1] for x in self.domain.control_bounds ] * self.trajectory_length opt_res =", "except: except ( pm.SamplingError, pm.parallel_sampling.ParallelSamplingError, ): return None return trace", "list = [], w_dim: int = 4, which_param_csv: int =", "arviz as az from inquire.agents.agent import Agent from inquire.environments.environment import", "self.random_scenario_index ] # Generate query_options while ensuring that features of", ":param last_query_choice: The previously selected query. 
Only required if self.incl_prev_query", "input for all queries :param args: the first argument is", "the preference # queries self.phi_prefs = [] def load_demo(self, phi_demos:", "to update function if query_option_count > 2\" elif not (", "phi: Dict, rank): \"\"\" Load the results of a preference" ]
[ "import VowpalMediator from coba.learners.vowpal import VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner, VowpalBagLearner from", "'CorralLearner', 'LinUCBLearner', 'VowpalArgsLearner', 'VowpalEpsilonLearner', 'VowpalSoftmaxLearner', 'VowpalBagLearner', 'VowpalCoverLearner', 'VowpalRegcbLearner', 'VowpalSquarecbLearner', 'VowpalOffPolicyLearner',", "import CorralLearner from coba.learners.vowpal import VowpalMediator from coba.learners.vowpal import VowpalArgsLearner,", "coba.learners.linucb import LinUCBLearner __all__ = [ 'Learner', 'SafeLearner', 'RandomLearner', 'FixedLearner',", "coba.learners.vowpal import VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner from coba.learners.linucb import LinUCBLearner", "module contains all public learners and learner interfaces.\"\"\" from coba.learners.primitives", "EpsilonBanditLearner, UcbBanditLearner, FixedLearner, RandomLearner from coba.learners.corral import CorralLearner from coba.learners.vowpal", "SafeLearner from coba.learners.bandit import EpsilonBanditLearner, UcbBanditLearner, FixedLearner, RandomLearner from coba.learners.corral", "interfaces.\"\"\" from coba.learners.primitives import Learner, SafeLearner from coba.learners.bandit import EpsilonBanditLearner,", "Learner, SafeLearner from coba.learners.bandit import EpsilonBanditLearner, UcbBanditLearner, FixedLearner, RandomLearner from", "VowpalEpsilonLearner, VowpalSoftmaxLearner, VowpalBagLearner from coba.learners.vowpal import VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner", "coba.learners.vowpal import VowpalMediator from coba.learners.vowpal import VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner, VowpalBagLearner", "coba.learners.primitives import Learner, SafeLearner from coba.learners.bandit import EpsilonBanditLearner, UcbBanditLearner, FixedLearner,", "coba.learners.vowpal import VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner, 
VowpalBagLearner from coba.learners.vowpal import VowpalCoverLearner,", "import VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner from coba.learners.linucb import LinUCBLearner __all__", "\"\"\"This module contains all public learners and learner interfaces.\"\"\" from", "public learners and learner interfaces.\"\"\" from coba.learners.primitives import Learner, SafeLearner", "from coba.learners.corral import CorralLearner from coba.learners.vowpal import VowpalMediator from coba.learners.vowpal", "VowpalBagLearner from coba.learners.vowpal import VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner from coba.learners.linucb", "VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner, VowpalBagLearner from coba.learners.vowpal import VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner,", "import LinUCBLearner __all__ = [ 'Learner', 'SafeLearner', 'RandomLearner', 'FixedLearner', 'EpsilonBanditLearner',", "all public learners and learner interfaces.\"\"\" from coba.learners.primitives import Learner,", "'Learner', 'SafeLearner', 'RandomLearner', 'FixedLearner', 'EpsilonBanditLearner', 'UcbBanditLearner', 'CorralLearner', 'LinUCBLearner', 'VowpalArgsLearner', 'VowpalEpsilonLearner',", "VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner from coba.learners.linucb import LinUCBLearner __all__ = [", "'RandomLearner', 'FixedLearner', 'EpsilonBanditLearner', 'UcbBanditLearner', 'CorralLearner', 'LinUCBLearner', 'VowpalArgsLearner', 'VowpalEpsilonLearner', 'VowpalSoftmaxLearner', 'VowpalBagLearner',", "from coba.learners.vowpal import VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner, VowpalBagLearner from coba.learners.vowpal import", "from coba.learners.primitives import Learner, SafeLearner from coba.learners.bandit import EpsilonBanditLearner, UcbBanditLearner,", "from coba.learners.vowpal import VowpalMediator from coba.learners.vowpal import 
VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner,", "and learner interfaces.\"\"\" from coba.learners.primitives import Learner, SafeLearner from coba.learners.bandit", "import VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner, VowpalBagLearner from coba.learners.vowpal import VowpalCoverLearner, VowpalRegcbLearner,", "VowpalSoftmaxLearner, VowpalBagLearner from coba.learners.vowpal import VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner from", "FixedLearner, RandomLearner from coba.learners.corral import CorralLearner from coba.learners.vowpal import VowpalMediator", "import Learner, SafeLearner from coba.learners.bandit import EpsilonBanditLearner, UcbBanditLearner, FixedLearner, RandomLearner", "'VowpalArgsLearner', 'VowpalEpsilonLearner', 'VowpalSoftmaxLearner', 'VowpalBagLearner', 'VowpalCoverLearner', 'VowpalRegcbLearner', 'VowpalSquarecbLearner', 'VowpalOffPolicyLearner', 'VowpalMediator' ]", "<reponame>mrucker/banditbenchmark<gh_stars>1-10 \"\"\"This module contains all public learners and learner interfaces.\"\"\"", "from coba.learners.bandit import EpsilonBanditLearner, UcbBanditLearner, FixedLearner, RandomLearner from coba.learners.corral import", "'FixedLearner', 'EpsilonBanditLearner', 'UcbBanditLearner', 'CorralLearner', 'LinUCBLearner', 'VowpalArgsLearner', 'VowpalEpsilonLearner', 'VowpalSoftmaxLearner', 'VowpalBagLearner', 'VowpalCoverLearner',", "from coba.learners.linucb import LinUCBLearner __all__ = [ 'Learner', 'SafeLearner', 'RandomLearner',", "VowpalOffPolicyLearner from coba.learners.linucb import LinUCBLearner __all__ = [ 'Learner', 'SafeLearner',", "learner interfaces.\"\"\" from coba.learners.primitives import Learner, SafeLearner from coba.learners.bandit import", "coba.learners.bandit import EpsilonBanditLearner, UcbBanditLearner, FixedLearner, RandomLearner from coba.learners.corral import CorralLearner", "RandomLearner from coba.learners.corral import CorralLearner from 
coba.learners.vowpal import VowpalMediator from", "contains all public learners and learner interfaces.\"\"\" from coba.learners.primitives import", "LinUCBLearner __all__ = [ 'Learner', 'SafeLearner', 'RandomLearner', 'FixedLearner', 'EpsilonBanditLearner', 'UcbBanditLearner',", "learners and learner interfaces.\"\"\" from coba.learners.primitives import Learner, SafeLearner from", "UcbBanditLearner, FixedLearner, RandomLearner from coba.learners.corral import CorralLearner from coba.learners.vowpal import", "= [ 'Learner', 'SafeLearner', 'RandomLearner', 'FixedLearner', 'EpsilonBanditLearner', 'UcbBanditLearner', 'CorralLearner', 'LinUCBLearner',", "'SafeLearner', 'RandomLearner', 'FixedLearner', 'EpsilonBanditLearner', 'UcbBanditLearner', 'CorralLearner', 'LinUCBLearner', 'VowpalArgsLearner', 'VowpalEpsilonLearner', 'VowpalSoftmaxLearner',", "from coba.learners.vowpal import VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner from coba.learners.linucb import", "[ 'Learner', 'SafeLearner', 'RandomLearner', 'FixedLearner', 'EpsilonBanditLearner', 'UcbBanditLearner', 'CorralLearner', 'LinUCBLearner', 'VowpalArgsLearner',", "VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner from coba.learners.linucb import LinUCBLearner __all__ =", "__all__ = [ 'Learner', 'SafeLearner', 'RandomLearner', 'FixedLearner', 'EpsilonBanditLearner', 'UcbBanditLearner', 'CorralLearner',", "import EpsilonBanditLearner, UcbBanditLearner, FixedLearner, RandomLearner from coba.learners.corral import CorralLearner from", "'UcbBanditLearner', 'CorralLearner', 'LinUCBLearner', 'VowpalArgsLearner', 'VowpalEpsilonLearner', 'VowpalSoftmaxLearner', 'VowpalBagLearner', 'VowpalCoverLearner', 'VowpalRegcbLearner', 'VowpalSquarecbLearner',", "'LinUCBLearner', 'VowpalArgsLearner', 'VowpalEpsilonLearner', 'VowpalSoftmaxLearner', 'VowpalBagLearner', 'VowpalCoverLearner', 'VowpalRegcbLearner', 'VowpalSquarecbLearner', 
'VowpalOffPolicyLearner', 'VowpalMediator'", "'EpsilonBanditLearner', 'UcbBanditLearner', 'CorralLearner', 'LinUCBLearner', 'VowpalArgsLearner', 'VowpalEpsilonLearner', 'VowpalSoftmaxLearner', 'VowpalBagLearner', 'VowpalCoverLearner', 'VowpalRegcbLearner',", "VowpalMediator from coba.learners.vowpal import VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner, VowpalBagLearner from coba.learners.vowpal", "VowpalSquarecbLearner, VowpalOffPolicyLearner from coba.learners.linucb import LinUCBLearner __all__ = [ 'Learner',", "CorralLearner from coba.learners.vowpal import VowpalMediator from coba.learners.vowpal import VowpalArgsLearner, VowpalEpsilonLearner,", "coba.learners.corral import CorralLearner from coba.learners.vowpal import VowpalMediator from coba.learners.vowpal import" ]